- *addr*: string listen address, e.g. `0.0.0.0:8081`
- *matchRule*: string match rule, default `prefix`. `prefix`: when nothing matches, fall back to the nearest `/` match; `all`: when nothing matches, return 404
- reqIdLoop: uint request-id ring size, used to identify requests in logs, default `1000`
-- *copyBlocks*: int number of forwarding blocks, default `1000`
+- *copyBlocks*: {} forwarding blocks
+ - *size*: string forwarding block size, default `16K`
+ - *num*: int number of forwarding blocks, default `1000`
- *retryBlocks*: {} retry. When disabled, no retry is performed. Otherwise: 1. when all blocks are in use, no retry is performed; 2. when the request has no `Content-Length`, it will be retried.
- *size*: string retry block size, default `1M`
- *num*: int number of retry blocks, default `0`; retry is disabled when `0`
- errBanSec: int when a backend fails (meaning connection failures, not backend error responses), it is disabled for this many seconds
- insecureSkipVerify: bool ignore insecure TLS certificates
- verifyPeerCer: string path; verify the server certificate against it, using an intermediate_ca
-- proxy: string send requests through a proxy, supports `socks5://`, `http://`, `https://`
+- proxy: string send requests through a proxy, supports `socks5://`, `http://`, `https://` (only effective for http, https, ws, wss)
-- filiter:
- - reqUri: request-path filter, applied before the request is sent to the backend
- - accessRule: boolean expression; passes only when it evaluates to true, e.g. `{id}|(!{id2}&{id3})`
+- filiter: {}
+ - reqUri: {} request-path filter, applied before the request is sent to the backend
+ - accessRule: string boolean expression; passes only when it evaluates to true, e.g. `{id}|(!{id2}&{id3})`
- items: map[string]string
- id: matchExp
- - reqHeader: request-header processor, applied before the request is sent to the backend
- - accessRule: boolean expression; passes only when it evaluates to true
+ - reqHeader: {} request-header processor, applied before the request is sent to the backend
+ - accessRule: string boolean expression; passes only when it evaluates to true
- items: map[string]{}
- id:
- key: string header name
- matchExp: string
- - resHeader: response-header processor, applied before the backend's response is returned
- - accessRule: boolean expression; passes only when it evaluates to true
+ - resHeader: {} response-header processor, applied before the backend's response is returned
+ - accessRule: string boolean expression; passes only when it evaluates to true
- items: map[string]{}
- id:
- key: string header name
- matchExp: string
- - reqBody: request-body filter, applied before the request is sent to the backend (only effective at the route level)
+ - reqBody: {} request-body filter, applied before the request is sent to the backend (only effective at the route level)
- action: string one of `access`, `deny`
- reqSize: string limits the request body size, default `1M`
- matchExp: string with `access`, the request is terminated if it does not match; with `deny`, the request is terminated if it matches
-- dealer:
+- dealer: {}
- reqUri: [] path processor, applied before the request is sent to the backend
- action: string one of `replace`
- matchExp: string with `replace`, used together with value for the replacement
- key: string which header to process
- matchExp: string with `replace`, used together with value for the replacement
- value: string with `replace`, used together with matchExp for the replacement; with `add`, the value is appended; with `set`, the value overwrites
+ - resBody: [] response-body processor, applied before the backend's response is returned (only effective for http and https); processed in forwarding blocks (see the Go sketch after this list)
+ - action: string one of `replace`
+ - matchExp: string with `replace`, used together with value for the replacement
+ - value: string with `replace`, used together with matchExp for the replacement
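For orientation, a minimal Go sketch of the new config shape (copyBlocks as `{size,num}`, plus a `dealer.resBody` replace rule), mirroring the `Test_ResBody` config further down in this PR. The `front` package path/name, the exported `Config`/`Blocks`/`Route`/`Setting`/`Back` types and the chosen values are assumptions drawn from this diff, not a complete reference:

```go
package main

import (
	"encoding/json"
	"fmt"

	front "github.com/qydysky/front"
	"github.com/qydysky/front/dealer"
)

func main() {
	// copyBlocks is now an object ({size,num}) and dealer.resBody rewrites response bodies.
	conf := front.Config{
		Addr:        "0.0.0.0:8081",
		CopyBlocks:  front.Blocks{Size: "16K", Num: 1000},
		RetryBlocks: front.Blocks{Size: "1M", Num: 10},
		Routes: []front.Route{{
			Path: []string{"/"},
			Setting: front.Setting{
				Dealer: dealer.Dealer{
					ResBody: []dealer.Body{{Action: "replace", MatchExp: "foo", Value: "bar"}},
				},
			},
			Backs: []front.Back{{Name: "b1", To: "://127.0.0.1:8080", Weight: 1}},
		}},
	}
	// Print the JSON form to see the keys documented above.
	if b, err := json.MarshalIndent(conf, "", "  "); err == nil {
		fmt.Println(string(b))
	}
}
```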
Pub string `json:"pub"`
Key string `json:"key"`
} `json:"tls"`
- RetryBlocks RetryBlocks `json:"retryBlocks"`
+ RetryBlocks Blocks `json:"retryBlocks"`
RetryBlocksI pslice.BlocksI[byte] `json:"-"`
MatchRule string `json:"matchRule"`
FdPath string `json:"fdPath"`
- CopyBlocks int `json:"copyBlocks"`
+ CopyBlocks Blocks `json:"copyBlocks"`
BlocksI pslice.BlocksI[byte] `json:"-"`
routeP pweb.WebPath
reqId atomic.Uint32 `json:"-"`
}
-type RetryBlocks struct {
+type Blocks struct {
Size string `json:"size"`
size int `json:"-"`
Num int `json:"num"`
}
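Both `Size` fields are parsed with `humanize.ParseBytes` (see the next hunk), so SI and IEC suffixes are accepted; a small sketch of the resulting byte counts, noting that the `16K` default is the SI value:

```go
package main

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

func main() {
	// Values produced by humanize.ParseBytes for typical Blocks.Size strings.
	for _, s := range []string{"16K", "16KiB", "1M", "3B"} {
		n, err := humanize.ParseBytes(s)
		fmt.Println(s, "->", n, err) // 16K -> 16000, 16KiB -> 16384, 1M -> 1000000, 3B -> 3
	}
}
```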
if t.BlocksI == nil {
- if t.CopyBlocks == 0 {
- t.CopyBlocks = 1000
+ if t.CopyBlocks.Num == 0 {
+ t.CopyBlocks.Num = 1000
}
- t.BlocksI = pslice.NewBlocks[byte](16*1024, t.CopyBlocks)
+ if size, err := humanize.ParseBytes(t.CopyBlocks.Size); err == nil && size > 0 {
+ t.CopyBlocks.size = int(size)
+ } else {
+ t.CopyBlocks.size = humanize.KByte * 16
+ }
+ t.BlocksI = pslice.NewBlocks[byte](t.CopyBlocks.size, t.CopyBlocks.Num)
}
if size, err := humanize.ParseBytes(t.RetryBlocks.Size); err == nil && size > 0 {
t.RetryBlocks.size = int(size)
} else {
t.verifyPeerCer, t.verifyPeerCerErr = os.ReadFile(path)
}
- if t.Proxy == "" {
- t.Proxy = t.route.Proxy
- }
if t.lastChosenT.IsZero() {
t.lastChosenT = time.Now()
}
t.AlwaysUp = len(t.route.Backs) == 1 || t.AlwaysUp
}
+func (t *Back) getProxy() string {
+ if t.Proxy == "" {
+ return t.route.Proxy
+ }
+ return t.Proxy
+}
func (t *Back) getSplicing() int {
if t.Splicing == 0 {
return t.route.Splicing
func (t *Back) getDealerResHeader() []dealer.HeaderDealer {
return append(t.route.Dealer.ResHeader, t.Dealer.ResHeader...)
}
+func (t *Back) getDealerResBody() []dealer.Body {
+ return append(t.route.Dealer.ResBody, t.Dealer.ResBody...)
+}
func (t *Back) Id() string {
return fmt.Sprintf("%p", t)
"testing"
"time"
+ "github.com/qydysky/front/dealer"
"github.com/qydysky/front/filiter"
plog "github.com/qydysky/part/log"
reqf "github.com/qydysky/part/reqf"
})
conf := &Config{
- RetryBlocks: RetryBlocks{
+ RetryBlocks: Blocks{
Num: 10,
Size: "3B",
},
})
conf := &Config{
- RetryBlocks: RetryBlocks{
+ RetryBlocks: Blocks{
Num: 10,
Size: "3B",
},
})
conf := &Config{
- RetryBlocks: RetryBlocks{
+ RetryBlocks: Blocks{
Num: 10,
Size: "3B",
},
t.Fatal()
}
}
+
+func Test_ResBody(t *testing.T) {
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ pweb.New(&http.Server{
+ Addr: "127.0.0.1:19001",
+ }).Handle(map[string]func(http.ResponseWriter, *http.Request){
+ `/`: func(w http.ResponseWriter, r *http.Request) {
+ io.Copy(w, r.Body)
+ },
+ })
+
+ conf := &Config{
+ RetryBlocks: Blocks{
+ Num: 10,
+ Size: "10B",
+ },
+ Addr: "127.0.0.1:19000",
+ Routes: []Route{
+ {
+ Path: []string{"/"},
+ PathAdd: true,
+ RollRule: "order",
+ Setting: Setting{
+ Dealer: dealer.Dealer{
+ ResBody: []dealer.Body{
+ {
+ Action: "replace",
+ MatchExp: "23",
+ Value: "ab",
+ },
+ },
+ },
+ },
+ Backs: []Back{
+ {
+ Name: "1",
+ To: "://127.0.0.1:19001",
+ Weight: 1,
+ },
+ },
+ },
+ },
+ }
+
+ go conf.Run(ctx, logger)
+
+ time.Sleep(time.Second)
+
+ r := reqf.New()
+ if e := r.Reqf(reqf.Rval{
+ Url: "http://127.0.0.1:19000/",
+ PostStr: "12345",
+ }); e != nil {
+ t.Fatal()
+ }
+ if !bytes.Equal(r.Respon, []byte("1ab45")) {
+ t.Fatal(r.Respon)
+ }
+}
--- /dev/null
+package dealer
+
+import (
+ "regexp"
+)
+
+type Body struct {
+ Action string `json:"action"`
+ MatchExp string `json:"matchExp"`
+ Value string `json:"value"`
+}
+
+func (t *Body) Valid() bool {
+ return t.MatchExp != ""
+}
+
+func (t *Body) GetReplaceDealer() (f func(data []byte) (dealed []byte, stop bool)) {
+ f = func(data []byte) (dealed []byte, stop bool) {
+ dealed = data
+ return
+ }
+ if !t.Valid() {
+ return
+ }
+ if exp, e := regexp.Compile(t.MatchExp); e == nil {
+ return func(data []byte) (dealed []byte, stop bool) {
+ dealed = exp.ReplaceAll(data, []byte(t.Value))
+ stop = false
+ return
+ }
+ } else {
+ return
+ }
+}
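A standalone sketch of how the new `Body` dealer behaves, using the same `23` -> `ab` replacement as `Test_ResBody`; in the proxy these functions are passed to `pio.CopyDealer`, so each call sees one forwarding block rather than the whole body:

```go
package main

import (
	"fmt"

	"github.com/qydysky/front/dealer"
)

func main() {
	// Build a replace dealer from a Body and apply it to a single block of data.
	b := dealer.Body{Action: "replace", MatchExp: "23", Value: "ab"}
	deal := b.GetReplaceDealer()
	out, stop := deal([]byte("12345"))
	fmt.Printf("%s %v\n", out, stop) // 1ab45 false
}
```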
ReqUri []UriDealer `json:"reqUri"`
ReqHeader []HeaderDealer `json:"reqHeader"`
ResHeader []HeaderDealer `json:"resHeader"`
+ ResBody []Body `json:"resBody"`
}
package dealer
-import "github.com/qydysky/front/filiter"
-
type HeaderDealer struct {
- filiter.HeaderFiliter
- Action string `json:"action"`
- Value string `json:"value"`
+ Key string `json:"key"`
+ MatchExp string `json:"matchExp"`
+ Action string `json:"action"`
+ Value string `json:"value"`
}
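With the filiter embed gone, `HeaderDealer` now carries its own `Key`/`MatchExp`. The helper below is purely illustrative (not the proxy's actual code) and assumes the documented `set`/`add`/`replace` semantics from the README section above:

```go
package main

import (
	"fmt"
	"net/http"
	"regexp"

	"github.com/qydysky/front/dealer"
)

// applyHeader is an illustrative helper: set overwrites, add appends, replace rewrites
// the current value of Key via MatchExp/Value.
func applyHeader(h http.Header, d dealer.HeaderDealer) {
	switch d.Action {
	case "set":
		h.Set(d.Key, d.Value)
	case "add":
		h.Add(d.Key, d.Value)
	case "replace":
		if exp, err := regexp.Compile(d.MatchExp); err == nil {
			h.Set(d.Key, exp.ReplaceAllString(h.Get(d.Key), d.Value))
		}
	}
}

func main() {
	h := http.Header{"Server": {"nginx/1.24"}}
	applyHeader(h, dealer.HeaderDealer{Action: "replace", Key: "Server", MatchExp: `nginx/[\d.]+`, Value: "front"})
	fmt.Println(h.Get("Server")) // front
}
```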
require (
github.com/dustin/go-humanize v1.0.1
github.com/gorilla/websocket v1.5.3
- github.com/qydysky/part v0.28.20250420070720
+ github.com/qydysky/brotli v0.0.0-20240828134800-e9913a6e7ed9
+ github.com/qydysky/part v0.28.20250421162339
golang.org/x/net v0.39.0
)
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
- github.com/qydysky/brotli v0.0.0-20240828134800-e9913a6e7ed9 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect
github.com/tklauser/go-sysconf v0.3.14 // indirect
github.com/tklauser/numcpus v0.9.0 // indirect
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/qydysky/brotli v0.0.0-20240828134800-e9913a6e7ed9 h1:k451T+bpsLr+Dq9Ujo+Qtx0iomRA1XXS5ttlEojvfuQ=
github.com/qydysky/brotli v0.0.0-20240828134800-e9913a6e7ed9/go.mod h1:cI8/gy/wjy2Eb+p2IUj2ZuDnC8R5Vrx3O0VMPvMvphA=
-github.com/qydysky/part v0.28.20250420070720 h1:8clOjzYMyRCZkIpW4dvL5BsnHeUujWvdIm1cCfRooGM=
-github.com/qydysky/part v0.28.20250420070720/go.mod h1:wp71PQdKYcg9jn9yDDvqC4shS/kzejyvFqbfUxuHocY=
+github.com/qydysky/part v0.28.20250421162339 h1:i/H7nRBmvRtNrjVgRnvHg75DYQmbwe88K6OutrHFogw=
+github.com/qydysky/part v0.28.20250421162339/go.mod h1:wp71PQdKYcg9jn9yDDvqC4shS/kzejyvFqbfUxuHocY=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=
"io"
"net/http"
netUrl "net/url"
+ "strings"
"time"
_ "unsafe"
+ "compress/flate"
+ gzip "compress/gzip"
+
+ br "github.com/qydysky/brotli"
"github.com/qydysky/front/utils"
component2 "github.com/qydysky/part/component2"
pctx "github.com/qydysky/part/ctx"
+ pio "github.com/qydysky/part/io"
pslice "github.com/qydysky/part/slice"
)
InsecureSkipVerify: chosenBack.getInsecureSkipVerify(),
}
- if chosenBack.Proxy != "" {
+ if chosenBack.getProxy() != "" {
customTransport.Proxy = func(_ *http.Request) (*netUrl.URL, error) {
- return netUrl.Parse(chosenBack.Proxy)
+ return netUrl.Parse(chosenBack.getProxy())
}
}
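For comparison, a fixed proxy URL could also be wired with the stdlib helper; the closure above presumably keeps the per-request `getProxy()` lookup and defers parse errors to request time, which `http.ProxyURL` does not:

```go
package main

import (
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	// Equivalent wiring for a single, pre-parsed proxy URL.
	u, err := url.Parse("http://127.0.0.1:8888")
	if err != nil {
		panic(err)
	}
	tr := &http.Transport{Proxy: http.ProxyURL(u)}
	fmt.Println(tr.Proxy != nil) // true
}
```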
return ErrCopy
} else {
defer put()
- if _, e = io.CopyBuffer(w, resp.Body, tmpbuf); e != nil {
+
+ var dealers []func(data []byte) (dealed []byte, stop bool)
+ for _, v := range chosenBack.getDealerResBody() {
+ switch v.Action {
+ case `replace`:
+ dealers = append(dealers, v.GetReplaceDealer())
+ default:
+ }
+ }
+ if len(dealers) > 0 {
+ var reader io.Reader
+ var writer io.Writer
+ var dealBody bool
+ switch strings.ToLower(resp.Header.Get("Content-Encoding")) {
+ case `br`:
+ reader = br.NewReader(resp.Body)
+ w1 := br.NewWriter(w)
+ writer = pio.RWC{
+ W: func(p []byte) (n int, err error) {
+ n, err = w1.Write(p)
+ w1.Flush()
+ return
+ },
+ }
+ dealBody = true
+ case `gzip`:
+ if tmp, e := gzip.NewReader(resp.Body); e != nil {
+ logger.Error(`E:`, fmt.Sprintf(logFormat, reqId, r.RemoteAddr, chosenBack.route.config.Addr, routePath, chosenBack.Name, "BLOCK", e, time.Since(opT)))
+ return ErrCopy
+ } else {
+ reader = tmp
+ w1 := gzip.NewWriter(w)
+ writer = pio.RWC{
+ W: func(p []byte) (n int, err error) {
+ n, err = w1.Write(p)
+ w1.Flush()
+ return
+ },
+ }
+ }
+ dealBody = true
+ case `deflate`:
+ if tmp, e := flate.NewWriter(w, 1); e != nil {
+ logger.Error(`E:`, fmt.Sprintf(logFormat, reqId, r.RemoteAddr, chosenBack.route.config.Addr, routePath, chosenBack.Name, "BLOCK", e, time.Since(opT)))
+ return ErrCopy
+ } else {
+ reader = flate.NewReader(resp.Body)
+ writer = pio.RWC{
+ W: func(p []byte) (n int, err error) {
+ n, err = tmp.Write(p)
+ tmp.Flush()
+ return
+ },
+ }
+ }
+ dealBody = true
+ case ``:
+ reader = resp.Body
+ writer = w
+ dealBody = true
+ default:
+ reader = resp.Body
+ writer = w
+ }
+ if dealBody {
+ if e := pio.CopyDealer(writer, reader, tmpbuf, dealers...); e != nil {
+ logger.Error(`E:`, fmt.Sprintf(logFormat, reqId, r.RemoteAddr, chosenBack.route.config.Addr, routePath, chosenBack.Name, "BLOCK", e, time.Since(opT)))
+ return ErrCopy
+ }
+ } else if _, e = io.CopyBuffer(w, resp.Body, tmpbuf); e != nil {
+ logger.Error(`E:`, fmt.Sprintf(logFormat, reqId, r.RemoteAddr, chosenBack.route.config.Addr, routePath, chosenBack.Name, "BLOCK", e, time.Since(opT)))
+ if !errors.Is(e, context.Canceled) {
+ chosenBack.Disable()
+ }
+ return ErrCopy
+ }
+ } else if _, e = io.CopyBuffer(w, resp.Body, tmpbuf); e != nil {
logger.Error(`E:`, fmt.Sprintf(logFormat, reqId, r.RemoteAddr, chosenBack.route.config.Addr, routePath, chosenBack.Name, "BLOCK", e, time.Since(opT)))
if !errors.Is(e, context.Canceled) {
chosenBack.Disable()
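A self-contained sketch of the decode, regexp-replace, re-encode flow that the hunk above implements for `Content-Encoding: gzip`; the real code streams through `pio.CopyDealer` with the forwarding-block buffer and also covers `br` and `deflate`, while this stdlib-only version buffers the whole body:

```go
package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io"
	"regexp"
)

// rewriteGzip decompresses, applies the regexp replacement, and re-compresses.
func rewriteGzip(encoded []byte, exp *regexp.Regexp, value []byte) ([]byte, error) {
	zr, err := gzip.NewReader(bytes.NewReader(encoded))
	if err != nil {
		return nil, err
	}
	plain, err := io.ReadAll(zr)
	if err != nil {
		return nil, err
	}
	var out bytes.Buffer
	zw := gzip.NewWriter(&out)
	if _, err := zw.Write(exp.ReplaceAll(plain, value)); err != nil {
		return nil, err
	}
	if err := zw.Close(); err != nil {
		return nil, err
	}
	return out.Bytes(), nil
}

func main() {
	// Encode "12345", rewrite "23" -> "ab", decode again: prints 1ab45.
	var buf bytes.Buffer
	zw := gzip.NewWriter(&buf)
	zw.Write([]byte("12345"))
	zw.Close()

	enc, err := rewriteGzip(buf.Bytes(), regexp.MustCompile("23"), []byte("ab"))
	if err != nil {
		panic(err)
	}
	zr, _ := gzip.NewReader(bytes.NewReader(enc))
	plain, _ := io.ReadAll(zr)
	fmt.Printf("%s\n", plain)
}
```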
sm := (map[string][]string)(s)
tm := (map[string][]string)(t)
for k, v := range sm {
- if strings.ToLower(k) == "origin" {
+ if strings.ToLower(k) == "origin" || strings.ToLower(k) == "content-length" {
continue
}
if strings.ToLower(k) == "set-cookie" {
for _, cookie := range v {
cookieSlice := strings.Split(cookie, ";")
for cookieK, cookieV := range cookieSlice {
- if strings.Contains(strings.ToLower(cookieV), "domain=") {
+ if strings.HasPrefix(strings.TrimSpace(strings.ToLower(cookieV)), "domain=") {
cookieSlice = slices.Delete(cookieSlice, cookieK, cookieK+1)
}
}
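The switch above from `Contains` to `HasPrefix(TrimSpace(...))` presumably avoids dropping a cookie pair whose name merely contains `domain`; a quick demonstration:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// With Contains, the "subdomain=x" pair would be treated as a Domain attribute too.
	for _, part := range strings.Split("subdomain=x; Domain=example.com; Path=/", ";") {
		p := strings.ToLower(part)
		fmt.Printf("%-22q contains=%-5v hasPrefix=%v\n",
			part, strings.Contains(p, "domain="),
			strings.HasPrefix(strings.TrimSpace(p), "domain="))
	}
}
```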
{
"addr": "127.0.0.1:0",
"matchRule": "prefix",
- "copyBlocks": 1000,
"tls": {
"pub": "",
"key": ""
},
"retryBlocks": {
- "sizeB": 10,
+ "size": "10B",
"num": 10
},
"routes": [
}
// If needed, wrap the dial function to connect through a proxy.
- if chosenBack.Proxy != "" {
- proxyURL, err := url.Parse(chosenBack.Proxy)
+ if chosenBack.getProxy() != "" {
+ proxyURL, err := url.Parse(chosenBack.getProxy())
if err != nil {
return nil, nil, err
}