mirror of https://github.com/SagerNet/sing-box.git — synced 2025-06-13 21:54:13 +08:00

Merge branch 'dev-next' into origin/tls-client-auth

Signed-off-by: jose-C2OaWi <111356383+jose-C2OaWi@users.noreply.github.com>
commit fbd3b23116
@@ -46,12 +46,25 @@ type InboundContext struct {
	SourceGeoIPCode string
	GeoIPCode       string
	ProcessInfo     *process.Info
	QueryType       uint16
	FakeIP          bool
	IPCIDRMatchSource bool

	// dns cache
	// rule cache

	QueryType               uint16
	IPCIDRMatchSource       bool
	SourceAddressMatch      bool
	SourcePortMatch         bool
	DestinationAddressMatch bool
	DestinationPortMatch    bool
}

func (c *InboundContext) ResetRuleCache() {
	c.IPCIDRMatchSource = false
	c.SourceAddressMatch = false
	c.SourcePortMatch = false
	c.DestinationAddressMatch = false
	c.DestinationPortMatch = false
}

type inboundContextKey struct{}
@@ -17,6 +17,7 @@ import (

type Router interface {
	Service
	PostStarter

	Outbounds() []Outbound
	Outbound(tag string) (Outbound, bool)

@@ -86,6 +87,7 @@ type DNSRule interface {

type RuleSet interface {
	StartContext(ctx context.Context, startContext RuleSetStartContext) error
	PostStart() error
	Close() error
	HeadlessRule
}
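The PostStarter embedded above (and called from box.go below) is not declared in this diff; as a reading aid, a minimal sketch of the assumed contract:

// Sketch only — the adapter package is assumed to declare PostStarter as a
// one-method interface, giving components such as the router and rule sets
// a second initialization phase that runs after every service has started.
type PostStarter interface {
	PostStart() error
}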
4 box.go

@@ -307,6 +307,10 @@ func (s *Box) postStart() error {
			}
		}
	}
	err := s.router.PostStart()
	if err != nil {
		return E.Cause(err, "post-start router")
	}
	return nil
}
@@ -1,7 +1,6 @@
package main

import (
	"encoding/json"
	"io"
	"os"
@@ -1,10 +1,10 @@
package badjsonmerge

import (
	"encoding/json"
	"reflect"

	"github.com/sagernet/sing-box/common/badjson"
	"github.com/sagernet/sing-box/common/json"
	"github.com/sagernet/sing-box/option"
	E "github.com/sagernet/sing/common/exceptions"
)
3 common/contextjson/README.md Normal file

@@ -0,0 +1,3 @@
# contextjson

mod from go1.21.4
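As a reading aid, a minimal sketch of how the package is meant to be used — assuming it keeps the drop-in encoding/json API that the common/json aliases later in this diff rely on; the Config/Outbound types and the exact error wording are illustrative, not taken from the diff:

package main

import (
	"fmt"

	json "github.com/sagernet/sing-box/common/contextjson"
)

type Outbound struct {
	Type string `json:"type"`
	Tag  string `json:"tag"`
}

type Config struct {
	Outbounds []Outbound `json:"outbounds"`
}

func main() {
	// "type" of the second outbound has the wrong JSON type; contextjson is
	// expected to prefix the decode error with the offending key path
	// (something like "outbounds[1].type"), where encoding/json reports only
	// the Go types involved.
	raw := []byte(`{"outbounds":[{"type":"direct"},{"type":123,"tag":"proxy"}]}`)
	var config Config
	if err := json.Unmarshal(raw, &config); err != nil {
		fmt.Println(err)
	}
}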
1325 common/contextjson/decode.go Normal file
File diff suppressed because it is too large.

49 common/contextjson/decode_context.go Normal file
@@ -0,0 +1,49 @@
package json

import "strconv"

type decodeContext struct {
	parent *decodeContext
	index  int
	key    string
}

func (d *decodeState) formatContext() string {
	var description string
	context := d.context
	var appendDot bool
	for context != nil {
		if appendDot {
			description = "." + description
		}
		if context.key != "" {
			description = context.key + description
			appendDot = true
		} else {
			description = "[" + strconv.Itoa(context.index) + "]" + description
			appendDot = false
		}
		context = context.parent
	}
	return description
}

type contextError struct {
	parent  error
	context string
	index   bool
}

func (c *contextError) Unwrap() error {
	return c.parent
}

func (c *contextError) Error() string {
	//goland:noinspection GoTypeAssertionOnErrors
	switch c.parent.(type) {
	case *contextError:
		return c.context + "." + c.parent.Error()
	default:
		return c.context + ": " + c.parent.Error()
	}
}
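To illustrate how the two pieces above compose (a trimmed, self-contained copy for demonstration only; the real decoder wires these up in decode.go, which is collapsed above):

package main

import (
	"errors"
	"fmt"
)

// Trimmed copy of contextError from above, for illustration only.
type contextError struct {
	parent  error
	context string
}

func (c *contextError) Unwrap() error { return c.parent }

func (c *contextError) Error() string {
	switch c.parent.(type) {
	case *contextError:
		return c.context + "." + c.parent.Error() // chain path segments with "."
	default:
		return c.context + ": " + c.parent.Error() // innermost error gets ": "
	}
}

func main() {
	inner := errors.New("cannot unmarshal number into Go value of type string")
	err := &contextError{
		context: "outbounds[1]",
		parent:  &contextError{context: "type", parent: inner},
	}
	// Prints: outbounds[1].type: cannot unmarshal number into Go value of type string
	fmt.Println(err)
}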
1283 common/contextjson/encode.go Normal file
File diff suppressed because it is too large.

48 common/contextjson/fold.go Normal file
@@ -0,0 +1,48 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package json

import (
	"unicode"
	"unicode/utf8"
)

// foldName returns a folded string such that foldName(x) == foldName(y)
// is identical to bytes.EqualFold(x, y).
func foldName(in []byte) []byte {
	// This is inlinable to take advantage of "function outlining".
	var arr [32]byte // large enough for most JSON names
	return appendFoldedName(arr[:0], in)
}

func appendFoldedName(out, in []byte) []byte {
	for i := 0; i < len(in); {
		// Handle single-byte ASCII.
		if c := in[i]; c < utf8.RuneSelf {
			if 'a' <= c && c <= 'z' {
				c -= 'a' - 'A'
			}
			out = append(out, c)
			i++
			continue
		}
		// Handle multi-byte Unicode.
		r, n := utf8.DecodeRune(in[i:])
		out = utf8.AppendRune(out, foldRune(r))
		i += n
	}
	return out
}

// foldRune returns the smallest rune for all runes in the same fold set.
func foldRune(r rune) rune {
	for {
		r2 := unicode.SimpleFold(r)
		if r2 <= r {
			return r2
		}
		r = r2
	}
}
174 common/contextjson/indent.go Normal file
@ -0,0 +1,174 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import "bytes"
|
||||
|
||||
// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
|
||||
// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
|
||||
// so that the JSON will be safe to embed inside HTML <script> tags.
|
||||
// For historical reasons, web browsers don't honor standard HTML
|
||||
// escaping within <script> tags, so an alternative JSON encoding must be used.
|
||||
func HTMLEscape(dst *bytes.Buffer, src []byte) {
|
||||
dst.Grow(len(src))
|
||||
dst.Write(appendHTMLEscape(dst.AvailableBuffer(), src))
|
||||
}
|
||||
|
||||
func appendHTMLEscape(dst, src []byte) []byte {
|
||||
// The characters can only appear in string literals,
|
||||
// so just scan the string one byte at a time.
|
||||
start := 0
|
||||
for i, c := range src {
|
||||
if c == '<' || c == '>' || c == '&' {
|
||||
dst = append(dst, src[start:i]...)
|
||||
dst = append(dst, '\\', 'u', '0', '0', hex[c>>4], hex[c&0xF])
|
||||
start = i + 1
|
||||
}
|
||||
// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
|
||||
if c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
|
||||
dst = append(dst, src[start:i]...)
|
||||
dst = append(dst, '\\', 'u', '2', '0', '2', hex[src[i+2]&0xF])
|
||||
start = i + len("\u2029")
|
||||
}
|
||||
}
|
||||
return append(dst, src[start:]...)
|
||||
}
|
||||
|
||||
// Compact appends to dst the JSON-encoded src with
|
||||
// insignificant space characters elided.
|
||||
func Compact(dst *bytes.Buffer, src []byte) error {
|
||||
dst.Grow(len(src))
|
||||
b := dst.AvailableBuffer()
|
||||
b, err := appendCompact(b, src, false)
|
||||
dst.Write(b)
|
||||
return err
|
||||
}
|
||||
|
||||
func appendCompact(dst, src []byte, escape bool) ([]byte, error) {
|
||||
origLen := len(dst)
|
||||
scan := newScanner()
|
||||
defer freeScanner(scan)
|
||||
start := 0
|
||||
for i, c := range src {
|
||||
if escape && (c == '<' || c == '>' || c == '&') {
|
||||
dst = append(dst, src[start:i]...)
|
||||
dst = append(dst, '\\', 'u', '0', '0', hex[c>>4], hex[c&0xF])
|
||||
start = i + 1
|
||||
}
|
||||
// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
|
||||
if escape && c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
|
||||
dst = append(dst, src[start:i]...)
|
||||
dst = append(dst, '\\', 'u', '2', '0', '2', hex[src[i+2]&0xF])
|
||||
start = i + len("\u2029")
|
||||
}
|
||||
v := scan.step(scan, c)
|
||||
if v >= scanSkipSpace {
|
||||
if v == scanError {
|
||||
break
|
||||
}
|
||||
dst = append(dst, src[start:i]...)
|
||||
start = i + 1
|
||||
}
|
||||
}
|
||||
if scan.eof() == scanError {
|
||||
return dst[:origLen], scan.err
|
||||
}
|
||||
dst = append(dst, src[start:]...)
|
||||
return dst, nil
|
||||
}
|
||||
|
||||
func appendNewline(dst []byte, prefix, indent string, depth int) []byte {
|
||||
dst = append(dst, '\n')
|
||||
dst = append(dst, prefix...)
|
||||
for i := 0; i < depth; i++ {
|
||||
dst = append(dst, indent...)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
// indentGrowthFactor specifies the growth factor of indenting JSON input.
|
||||
// Empirically, the growth factor was measured to be between 1.4x to 1.8x
|
||||
// for some set of compacted JSON with the indent being a single tab.
|
||||
// Specify a growth factor slightly larger than what is observed
|
||||
// to reduce probability of allocation in appendIndent.
|
||||
// A factor no higher than 2 ensures that wasted space never exceeds 50%.
|
||||
const indentGrowthFactor = 2
|
||||
|
||||
// Indent appends to dst an indented form of the JSON-encoded src.
|
||||
// Each element in a JSON object or array begins on a new,
|
||||
// indented line beginning with prefix followed by one or more
|
||||
// copies of indent according to the indentation nesting.
|
||||
// The data appended to dst does not begin with the prefix nor
|
||||
// any indentation, to make it easier to embed inside other formatted JSON data.
|
||||
// Although leading space characters (space, tab, carriage return, newline)
|
||||
// at the beginning of src are dropped, trailing space characters
|
||||
// at the end of src are preserved and copied to dst.
|
||||
// For example, if src has no trailing spaces, neither will dst;
|
||||
// if src ends in a trailing newline, so will dst.
|
||||
func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
|
||||
dst.Grow(indentGrowthFactor * len(src))
|
||||
b := dst.AvailableBuffer()
|
||||
b, err := appendIndent(b, src, prefix, indent)
|
||||
dst.Write(b)
|
||||
return err
|
||||
}
|
||||
|
||||
func appendIndent(dst, src []byte, prefix, indent string) ([]byte, error) {
|
||||
origLen := len(dst)
|
||||
scan := newScanner()
|
||||
defer freeScanner(scan)
|
||||
needIndent := false
|
||||
depth := 0
|
||||
for _, c := range src {
|
||||
scan.bytes++
|
||||
v := scan.step(scan, c)
|
||||
if v == scanSkipSpace {
|
||||
continue
|
||||
}
|
||||
if v == scanError {
|
||||
break
|
||||
}
|
||||
if needIndent && v != scanEndObject && v != scanEndArray {
|
||||
needIndent = false
|
||||
depth++
|
||||
dst = appendNewline(dst, prefix, indent, depth)
|
||||
}
|
||||
|
||||
// Emit semantically uninteresting bytes
|
||||
// (in particular, punctuation in strings) unmodified.
|
||||
if v == scanContinue {
|
||||
dst = append(dst, c)
|
||||
continue
|
||||
}
|
||||
|
||||
// Add spacing around real punctuation.
|
||||
switch c {
|
||||
case '{', '[':
|
||||
// delay indent so that empty object and array are formatted as {} and [].
|
||||
needIndent = true
|
||||
dst = append(dst, c)
|
||||
case ',':
|
||||
dst = append(dst, c)
|
||||
dst = appendNewline(dst, prefix, indent, depth)
|
||||
case ':':
|
||||
dst = append(dst, c, ' ')
|
||||
case '}', ']':
|
||||
if needIndent {
|
||||
// suppress indent in empty object/array
|
||||
needIndent = false
|
||||
} else {
|
||||
depth--
|
||||
dst = appendNewline(dst, prefix, indent, depth)
|
||||
}
|
||||
dst = append(dst, c)
|
||||
default:
|
||||
dst = append(dst, c)
|
||||
}
|
||||
}
|
||||
if scan.eof() == scanError {
|
||||
return dst[:origLen], scan.err
|
||||
}
|
||||
return dst, nil
|
||||
}
|
610 common/contextjson/scanner.go Normal file
@ -0,0 +1,610 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
// JSON value parser state machine.
|
||||
// Just about at the limit of what is reasonable to write by hand.
|
||||
// Some parts are a bit tedious, but overall it nicely factors out the
|
||||
// otherwise common code from the multiple scanning functions
|
||||
// in this package (Compact, Indent, checkValid, etc).
|
||||
//
|
||||
// This file starts with two simple examples using the scanner
|
||||
// before diving into the scanner itself.
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Valid reports whether data is a valid JSON encoding.
|
||||
func Valid(data []byte) bool {
|
||||
scan := newScanner()
|
||||
defer freeScanner(scan)
|
||||
return checkValid(data, scan) == nil
|
||||
}
|
||||
|
||||
// checkValid verifies that data is valid JSON-encoded data.
|
||||
// scan is passed in for use by checkValid to avoid an allocation.
|
||||
// checkValid returns nil or a SyntaxError.
|
||||
func checkValid(data []byte, scan *scanner) error {
|
||||
scan.reset()
|
||||
for _, c := range data {
|
||||
scan.bytes++
|
||||
if scan.step(scan, c) == scanError {
|
||||
return scan.err
|
||||
}
|
||||
}
|
||||
if scan.eof() == scanError {
|
||||
return scan.err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// A SyntaxError is a description of a JSON syntax error.
|
||||
// Unmarshal will return a SyntaxError if the JSON can't be parsed.
|
||||
type SyntaxError struct {
|
||||
msg string // description of error
|
||||
Offset int64 // error occurred after reading Offset bytes
|
||||
}
|
||||
|
||||
func (e *SyntaxError) Error() string { return e.msg }
|
||||
|
||||
// A scanner is a JSON scanning state machine.
|
||||
// Callers call scan.reset and then pass bytes in one at a time
|
||||
// by calling scan.step(&scan, c) for each byte.
|
||||
// The return value, referred to as an opcode, tells the
|
||||
// caller about significant parsing events like beginning
|
||||
// and ending literals, objects, and arrays, so that the
|
||||
// caller can follow along if it wishes.
|
||||
// The return value scanEnd indicates that a single top-level
|
||||
// JSON value has been completed, *before* the byte that
|
||||
// just got passed in. (The indication must be delayed in order
|
||||
// to recognize the end of numbers: is 123 a whole value or
|
||||
// the beginning of 12345e+6?).
|
||||
type scanner struct {
|
||||
// The step is a func to be called to execute the next transition.
|
||||
// Also tried using an integer constant and a single func
|
||||
// with a switch, but using the func directly was 10% faster
|
||||
// on a 64-bit Mac Mini, and it's nicer to read.
|
||||
step func(*scanner, byte) int
|
||||
|
||||
// Reached end of top-level value.
|
||||
endTop bool
|
||||
|
||||
// Stack of what we're in the middle of - array values, object keys, object values.
|
||||
parseState []int
|
||||
|
||||
// Error that happened, if any.
|
||||
err error
|
||||
|
||||
// total bytes consumed, updated by decoder.Decode (and deliberately
|
||||
// not set to zero by scan.reset)
|
||||
bytes int64
|
||||
}
|
||||
|
||||
var scannerPool = sync.Pool{
|
||||
New: func() any {
|
||||
return &scanner{}
|
||||
},
|
||||
}
|
||||
|
||||
func newScanner() *scanner {
|
||||
scan := scannerPool.Get().(*scanner)
|
||||
// scan.reset by design doesn't set bytes to zero
|
||||
scan.bytes = 0
|
||||
scan.reset()
|
||||
return scan
|
||||
}
|
||||
|
||||
func freeScanner(scan *scanner) {
|
||||
// Avoid hanging on to too much memory in extreme cases.
|
||||
if len(scan.parseState) > 1024 {
|
||||
scan.parseState = nil
|
||||
}
|
||||
scannerPool.Put(scan)
|
||||
}
|
||||
|
||||
// These values are returned by the state transition functions
|
||||
// assigned to scanner.state and the method scanner.eof.
|
||||
// They give details about the current state of the scan that
|
||||
// callers might be interested to know about.
|
||||
// It is okay to ignore the return value of any particular
|
||||
// call to scanner.state: if one call returns scanError,
|
||||
// every subsequent call will return scanError too.
|
||||
const (
|
||||
// Continue.
|
||||
scanContinue = iota // uninteresting byte
|
||||
scanBeginLiteral // end implied by next result != scanContinue
|
||||
scanBeginObject // begin object
|
||||
scanObjectKey // just finished object key (string)
|
||||
scanObjectValue // just finished non-last object value
|
||||
scanEndObject // end object (implies scanObjectValue if possible)
|
||||
scanBeginArray // begin array
|
||||
scanArrayValue // just finished array value
|
||||
scanEndArray // end array (implies scanArrayValue if possible)
|
||||
scanSkipSpace // space byte; can skip; known to be last "continue" result
|
||||
|
||||
// Stop.
|
||||
scanEnd // top-level value ended *before* this byte; known to be first "stop" result
|
||||
scanError // hit an error, scanner.err.
|
||||
)
|
||||
|
||||
// These values are stored in the parseState stack.
|
||||
// They give the current state of a composite value
|
||||
// being scanned. If the parser is inside a nested value
|
||||
// the parseState describes the nested state, outermost at entry 0.
|
||||
const (
|
||||
parseObjectKey = iota // parsing object key (before colon)
|
||||
parseObjectValue // parsing object value (after colon)
|
||||
parseArrayValue // parsing array value
|
||||
)
|
||||
|
||||
// This limits the max nesting depth to prevent stack overflow.
|
||||
// This is permitted by https://tools.ietf.org/html/rfc7159#section-9
|
||||
const maxNestingDepth = 10000
|
||||
|
||||
// reset prepares the scanner for use.
|
||||
// It must be called before calling s.step.
|
||||
func (s *scanner) reset() {
|
||||
s.step = stateBeginValue
|
||||
s.parseState = s.parseState[0:0]
|
||||
s.err = nil
|
||||
s.endTop = false
|
||||
}
|
||||
|
||||
// eof tells the scanner that the end of input has been reached.
|
||||
// It returns a scan status just as s.step does.
|
||||
func (s *scanner) eof() int {
|
||||
if s.err != nil {
|
||||
return scanError
|
||||
}
|
||||
if s.endTop {
|
||||
return scanEnd
|
||||
}
|
||||
s.step(s, ' ')
|
||||
if s.endTop {
|
||||
return scanEnd
|
||||
}
|
||||
if s.err == nil {
|
||||
s.err = &SyntaxError{"unexpected end of JSON input", s.bytes}
|
||||
}
|
||||
return scanError
|
||||
}
|
||||
|
||||
// pushParseState pushes a new parse state p onto the parse stack.
|
||||
// an error state is returned if maxNestingDepth was exceeded, otherwise successState is returned.
|
||||
func (s *scanner) pushParseState(c byte, newParseState int, successState int) int {
|
||||
s.parseState = append(s.parseState, newParseState)
|
||||
if len(s.parseState) <= maxNestingDepth {
|
||||
return successState
|
||||
}
|
||||
return s.error(c, "exceeded max depth")
|
||||
}
|
||||
|
||||
// popParseState pops a parse state (already obtained) off the stack
|
||||
// and updates s.step accordingly.
|
||||
func (s *scanner) popParseState() {
|
||||
n := len(s.parseState) - 1
|
||||
s.parseState = s.parseState[0:n]
|
||||
if n == 0 {
|
||||
s.step = stateEndTop
|
||||
s.endTop = true
|
||||
} else {
|
||||
s.step = stateEndValue
|
||||
}
|
||||
}
|
||||
|
||||
func isSpace(c byte) bool {
|
||||
return c <= ' ' && (c == ' ' || c == '\t' || c == '\r' || c == '\n')
|
||||
}
|
||||
|
||||
// stateBeginValueOrEmpty is the state after reading `[`.
|
||||
func stateBeginValueOrEmpty(s *scanner, c byte) int {
|
||||
if isSpace(c) {
|
||||
return scanSkipSpace
|
||||
}
|
||||
if c == ']' {
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
return stateBeginValue(s, c)
|
||||
}
|
||||
|
||||
// stateBeginValue is the state at the beginning of the input.
|
||||
func stateBeginValue(s *scanner, c byte) int {
|
||||
if isSpace(c) {
|
||||
return scanSkipSpace
|
||||
}
|
||||
switch c {
|
||||
case '{':
|
||||
s.step = stateBeginStringOrEmpty
|
||||
return s.pushParseState(c, parseObjectKey, scanBeginObject)
|
||||
case '[':
|
||||
s.step = stateBeginValueOrEmpty
|
||||
return s.pushParseState(c, parseArrayValue, scanBeginArray)
|
||||
case '"':
|
||||
s.step = stateInString
|
||||
return scanBeginLiteral
|
||||
case '-':
|
||||
s.step = stateNeg
|
||||
return scanBeginLiteral
|
||||
case '0': // beginning of 0.123
|
||||
s.step = state0
|
||||
return scanBeginLiteral
|
||||
case 't': // beginning of true
|
||||
s.step = stateT
|
||||
return scanBeginLiteral
|
||||
case 'f': // beginning of false
|
||||
s.step = stateF
|
||||
return scanBeginLiteral
|
||||
case 'n': // beginning of null
|
||||
s.step = stateN
|
||||
return scanBeginLiteral
|
||||
}
|
||||
if '1' <= c && c <= '9' { // beginning of 1234.5
|
||||
s.step = state1
|
||||
return scanBeginLiteral
|
||||
}
|
||||
return s.error(c, "looking for beginning of value")
|
||||
}
|
||||
|
||||
// stateBeginStringOrEmpty is the state after reading `{`.
|
||||
func stateBeginStringOrEmpty(s *scanner, c byte) int {
|
||||
if isSpace(c) {
|
||||
return scanSkipSpace
|
||||
}
|
||||
if c == '}' {
|
||||
n := len(s.parseState)
|
||||
s.parseState[n-1] = parseObjectValue
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
return stateBeginString(s, c)
|
||||
}
|
||||
|
||||
// stateBeginString is the state after reading `{"key": value,`.
|
||||
func stateBeginString(s *scanner, c byte) int {
|
||||
if isSpace(c) {
|
||||
return scanSkipSpace
|
||||
}
|
||||
if c == '"' {
|
||||
s.step = stateInString
|
||||
return scanBeginLiteral
|
||||
}
|
||||
return s.error(c, "looking for beginning of object key string")
|
||||
}
|
||||
|
||||
// stateEndValue is the state after completing a value,
|
||||
// such as after reading `{}` or `true` or `["x"`.
|
||||
func stateEndValue(s *scanner, c byte) int {
|
||||
n := len(s.parseState)
|
||||
if n == 0 {
|
||||
// Completed top-level before the current byte.
|
||||
s.step = stateEndTop
|
||||
s.endTop = true
|
||||
return stateEndTop(s, c)
|
||||
}
|
||||
if isSpace(c) {
|
||||
s.step = stateEndValue
|
||||
return scanSkipSpace
|
||||
}
|
||||
ps := s.parseState[n-1]
|
||||
switch ps {
|
||||
case parseObjectKey:
|
||||
if c == ':' {
|
||||
s.parseState[n-1] = parseObjectValue
|
||||
s.step = stateBeginValue
|
||||
return scanObjectKey
|
||||
}
|
||||
return s.error(c, "after object key")
|
||||
case parseObjectValue:
|
||||
if c == ',' {
|
||||
s.parseState[n-1] = parseObjectKey
|
||||
s.step = stateBeginString
|
||||
return scanObjectValue
|
||||
}
|
||||
if c == '}' {
|
||||
s.popParseState()
|
||||
return scanEndObject
|
||||
}
|
||||
return s.error(c, "after object key:value pair")
|
||||
case parseArrayValue:
|
||||
if c == ',' {
|
||||
s.step = stateBeginValue
|
||||
return scanArrayValue
|
||||
}
|
||||
if c == ']' {
|
||||
s.popParseState()
|
||||
return scanEndArray
|
||||
}
|
||||
return s.error(c, "after array element")
|
||||
}
|
||||
return s.error(c, "")
|
||||
}
|
||||
|
||||
// stateEndTop is the state after finishing the top-level value,
|
||||
// such as after reading `{}` or `[1,2,3]`.
|
||||
// Only space characters should be seen now.
|
||||
func stateEndTop(s *scanner, c byte) int {
|
||||
if !isSpace(c) {
|
||||
// Complain about non-space byte on next call.
|
||||
s.error(c, "after top-level value")
|
||||
}
|
||||
return scanEnd
|
||||
}
|
||||
|
||||
// stateInString is the state after reading `"`.
|
||||
func stateInString(s *scanner, c byte) int {
|
||||
if c == '"' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
if c == '\\' {
|
||||
s.step = stateInStringEsc
|
||||
return scanContinue
|
||||
}
|
||||
if c < 0x20 {
|
||||
return s.error(c, "in string literal")
|
||||
}
|
||||
return scanContinue
|
||||
}
|
||||
|
||||
// stateInStringEsc is the state after reading `"\` during a quoted string.
|
||||
func stateInStringEsc(s *scanner, c byte) int {
|
||||
switch c {
|
||||
case 'b', 'f', 'n', 'r', 't', '\\', '/', '"':
|
||||
s.step = stateInString
|
||||
return scanContinue
|
||||
case 'u':
|
||||
s.step = stateInStringEscU
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in string escape code")
|
||||
}
|
||||
|
||||
// stateInStringEscU is the state after reading `"\u` during a quoted string.
|
||||
func stateInStringEscU(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInStringEscU1
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateInStringEscU1 is the state after reading `"\u1` during a quoted string.
|
||||
func stateInStringEscU1(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInStringEscU12
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateInStringEscU12 is the state after reading `"\u12` during a quoted string.
|
||||
func stateInStringEscU12(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInStringEscU123
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateInStringEscU123 is the state after reading `"\u123` during a quoted string.
|
||||
func stateInStringEscU123(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
||||
s.step = stateInString
|
||||
return scanContinue
|
||||
}
|
||||
// numbers
|
||||
return s.error(c, "in \\u hexadecimal character escape")
|
||||
}
|
||||
|
||||
// stateNeg is the state after reading `-` during a number.
|
||||
func stateNeg(s *scanner, c byte) int {
|
||||
if c == '0' {
|
||||
s.step = state0
|
||||
return scanContinue
|
||||
}
|
||||
if '1' <= c && c <= '9' {
|
||||
s.step = state1
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in numeric literal")
|
||||
}
|
||||
|
||||
// state1 is the state after reading a non-zero integer during a number,
|
||||
// such as after reading `1` or `100` but not `0`.
|
||||
func state1(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
s.step = state1
|
||||
return scanContinue
|
||||
}
|
||||
return state0(s, c)
|
||||
}
|
||||
|
||||
// state0 is the state after reading `0` during a number.
|
||||
func state0(s *scanner, c byte) int {
|
||||
if c == '.' {
|
||||
s.step = stateDot
|
||||
return scanContinue
|
||||
}
|
||||
if c == 'e' || c == 'E' {
|
||||
s.step = stateE
|
||||
return scanContinue
|
||||
}
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
|
||||
// stateDot is the state after reading the integer and decimal point in a number,
|
||||
// such as after reading `1.`.
|
||||
func stateDot(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
s.step = stateDot0
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "after decimal point in numeric literal")
|
||||
}
|
||||
|
||||
// stateDot0 is the state after reading the integer, decimal point, and subsequent
|
||||
// digits of a number, such as after reading `3.14`.
|
||||
func stateDot0(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
return scanContinue
|
||||
}
|
||||
if c == 'e' || c == 'E' {
|
||||
s.step = stateE
|
||||
return scanContinue
|
||||
}
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
|
||||
// stateE is the state after reading the mantissa and e in a number,
|
||||
// such as after reading `314e` or `0.314e`.
|
||||
func stateE(s *scanner, c byte) int {
|
||||
if c == '+' || c == '-' {
|
||||
s.step = stateESign
|
||||
return scanContinue
|
||||
}
|
||||
return stateESign(s, c)
|
||||
}
|
||||
|
||||
// stateESign is the state after reading the mantissa, e, and sign in a number,
|
||||
// such as after reading `314e-` or `0.314e+`.
|
||||
func stateESign(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
s.step = stateE0
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in exponent of numeric literal")
|
||||
}
|
||||
|
||||
// stateE0 is the state after reading the mantissa, e, optional sign,
|
||||
// and at least one digit of the exponent in a number,
|
||||
// such as after reading `314e-2` or `0.314e+1` or `3.14e0`.
|
||||
func stateE0(s *scanner, c byte) int {
|
||||
if '0' <= c && c <= '9' {
|
||||
return scanContinue
|
||||
}
|
||||
return stateEndValue(s, c)
|
||||
}
|
||||
|
||||
// stateT is the state after reading `t`.
|
||||
func stateT(s *scanner, c byte) int {
|
||||
if c == 'r' {
|
||||
s.step = stateTr
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal true (expecting 'r')")
|
||||
}
|
||||
|
||||
// stateTr is the state after reading `tr`.
|
||||
func stateTr(s *scanner, c byte) int {
|
||||
if c == 'u' {
|
||||
s.step = stateTru
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal true (expecting 'u')")
|
||||
}
|
||||
|
||||
// stateTru is the state after reading `tru`.
|
||||
func stateTru(s *scanner, c byte) int {
|
||||
if c == 'e' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal true (expecting 'e')")
|
||||
}
|
||||
|
||||
// stateF is the state after reading `f`.
|
||||
func stateF(s *scanner, c byte) int {
|
||||
if c == 'a' {
|
||||
s.step = stateFa
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 'a')")
|
||||
}
|
||||
|
||||
// stateFa is the state after reading `fa`.
|
||||
func stateFa(s *scanner, c byte) int {
|
||||
if c == 'l' {
|
||||
s.step = stateFal
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 'l')")
|
||||
}
|
||||
|
||||
// stateFal is the state after reading `fal`.
|
||||
func stateFal(s *scanner, c byte) int {
|
||||
if c == 's' {
|
||||
s.step = stateFals
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 's')")
|
||||
}
|
||||
|
||||
// stateFals is the state after reading `fals`.
|
||||
func stateFals(s *scanner, c byte) int {
|
||||
if c == 'e' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal false (expecting 'e')")
|
||||
}
|
||||
|
||||
// stateN is the state after reading `n`.
|
||||
func stateN(s *scanner, c byte) int {
|
||||
if c == 'u' {
|
||||
s.step = stateNu
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal null (expecting 'u')")
|
||||
}
|
||||
|
||||
// stateNu is the state after reading `nu`.
|
||||
func stateNu(s *scanner, c byte) int {
|
||||
if c == 'l' {
|
||||
s.step = stateNul
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal null (expecting 'l')")
|
||||
}
|
||||
|
||||
// stateNul is the state after reading `nul`.
|
||||
func stateNul(s *scanner, c byte) int {
|
||||
if c == 'l' {
|
||||
s.step = stateEndValue
|
||||
return scanContinue
|
||||
}
|
||||
return s.error(c, "in literal null (expecting 'l')")
|
||||
}
|
||||
|
||||
// stateError is the state after reaching a syntax error,
|
||||
// such as after reading `[1}` or `5.1.2`.
|
||||
func stateError(s *scanner, c byte) int {
|
||||
return scanError
|
||||
}
|
||||
|
||||
// error records an error and switches to the error state.
|
||||
func (s *scanner) error(c byte, context string) int {
|
||||
s.step = stateError
|
||||
s.err = &SyntaxError{"invalid character " + quoteChar(c) + " " + context, s.bytes}
|
||||
return scanError
|
||||
}
|
||||
|
||||
// quoteChar formats c as a quoted character literal.
|
||||
func quoteChar(c byte) string {
|
||||
// special cases - different from quoted strings
|
||||
if c == '\'' {
|
||||
return `'\''`
|
||||
}
|
||||
if c == '"' {
|
||||
return `'"'`
|
||||
}
|
||||
|
||||
// use quoted string with different quotation marks
|
||||
s := strconv.Quote(string(c))
|
||||
return "'" + s[1:len(s)-1] + "'"
|
||||
}
|
513 common/contextjson/stream.go Normal file
@ -0,0 +1,513 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"io"
|
||||
)
|
||||
|
||||
// A Decoder reads and decodes JSON values from an input stream.
|
||||
type Decoder struct {
|
||||
r io.Reader
|
||||
buf []byte
|
||||
d decodeState
|
||||
scanp int // start of unread data in buf
|
||||
scanned int64 // amount of data already scanned
|
||||
scan scanner
|
||||
err error
|
||||
|
||||
tokenState int
|
||||
tokenStack []int
|
||||
}
|
||||
|
||||
// NewDecoder returns a new decoder that reads from r.
|
||||
//
|
||||
// The decoder introduces its own buffering and may
|
||||
// read data from r beyond the JSON values requested.
|
||||
func NewDecoder(r io.Reader) *Decoder {
|
||||
return &Decoder{r: r}
|
||||
}
|
||||
|
||||
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
|
||||
// Number instead of as a float64.
|
||||
func (dec *Decoder) UseNumber() { dec.d.useNumber = true }
|
||||
|
||||
// DisallowUnknownFields causes the Decoder to return an error when the destination
|
||||
// is a struct and the input contains object keys which do not match any
|
||||
// non-ignored, exported fields in the destination.
|
||||
func (dec *Decoder) DisallowUnknownFields() { dec.d.disallowUnknownFields = true }
|
||||
|
||||
// Decode reads the next JSON-encoded value from its
|
||||
// input and stores it in the value pointed to by v.
|
||||
//
|
||||
// See the documentation for Unmarshal for details about
|
||||
// the conversion of JSON into a Go value.
|
||||
func (dec *Decoder) Decode(v any) error {
|
||||
if dec.err != nil {
|
||||
return dec.err
|
||||
}
|
||||
|
||||
if err := dec.tokenPrepareForDecode(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !dec.tokenValueAllowed() {
|
||||
return &SyntaxError{msg: "not at beginning of value", Offset: dec.InputOffset()}
|
||||
}
|
||||
|
||||
// Read whole value into buffer.
|
||||
n, err := dec.readValue()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
dec.d.init(dec.buf[dec.scanp : dec.scanp+n])
|
||||
dec.scanp += n
|
||||
|
||||
// Don't save err from unmarshal into dec.err:
|
||||
// the connection is still usable since we read a complete JSON
|
||||
// object from it before the error happened.
|
||||
err = dec.d.unmarshal(v)
|
||||
|
||||
// fixup token streaming state
|
||||
dec.tokenValueEnd()
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// Buffered returns a reader of the data remaining in the Decoder's
|
||||
// buffer. The reader is valid until the next call to Decode.
|
||||
func (dec *Decoder) Buffered() io.Reader {
|
||||
return bytes.NewReader(dec.buf[dec.scanp:])
|
||||
}
|
||||
|
||||
// readValue reads a JSON value into dec.buf.
|
||||
// It returns the length of the encoding.
|
||||
func (dec *Decoder) readValue() (int, error) {
|
||||
dec.scan.reset()
|
||||
|
||||
scanp := dec.scanp
|
||||
var err error
|
||||
Input:
|
||||
// help the compiler see that scanp is never negative, so it can remove
|
||||
// some bounds checks below.
|
||||
for scanp >= 0 {
|
||||
|
||||
// Look in the buffer for a new value.
|
||||
for ; scanp < len(dec.buf); scanp++ {
|
||||
c := dec.buf[scanp]
|
||||
dec.scan.bytes++
|
||||
switch dec.scan.step(&dec.scan, c) {
|
||||
case scanEnd:
|
||||
// scanEnd is delayed one byte so we decrement
|
||||
// the scanner bytes count by 1 to ensure that
|
||||
// this value is correct in the next call of Decode.
|
||||
dec.scan.bytes--
|
||||
break Input
|
||||
case scanEndObject, scanEndArray:
|
||||
// scanEnd is delayed one byte.
|
||||
// We might block trying to get that byte from src,
|
||||
// so instead invent a space byte.
|
||||
if stateEndValue(&dec.scan, ' ') == scanEnd {
|
||||
scanp++
|
||||
break Input
|
||||
}
|
||||
case scanError:
|
||||
dec.err = dec.scan.err
|
||||
return 0, dec.scan.err
|
||||
}
|
||||
}
|
||||
|
||||
// Did the last read have an error?
|
||||
// Delayed until now to allow buffer scan.
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
if dec.scan.step(&dec.scan, ' ') == scanEnd {
|
||||
break Input
|
||||
}
|
||||
if nonSpace(dec.buf) {
|
||||
err = io.ErrUnexpectedEOF
|
||||
}
|
||||
}
|
||||
dec.err = err
|
||||
return 0, err
|
||||
}
|
||||
|
||||
n := scanp - dec.scanp
|
||||
err = dec.refill()
|
||||
scanp = dec.scanp + n
|
||||
}
|
||||
return scanp - dec.scanp, nil
|
||||
}
|
||||
|
||||
func (dec *Decoder) refill() error {
|
||||
// Make room to read more into the buffer.
|
||||
// First slide down data already consumed.
|
||||
if dec.scanp > 0 {
|
||||
dec.scanned += int64(dec.scanp)
|
||||
n := copy(dec.buf, dec.buf[dec.scanp:])
|
||||
dec.buf = dec.buf[:n]
|
||||
dec.scanp = 0
|
||||
}
|
||||
|
||||
// Grow buffer if not large enough.
|
||||
const minRead = 512
|
||||
if cap(dec.buf)-len(dec.buf) < minRead {
|
||||
newBuf := make([]byte, len(dec.buf), 2*cap(dec.buf)+minRead)
|
||||
copy(newBuf, dec.buf)
|
||||
dec.buf = newBuf
|
||||
}
|
||||
|
||||
// Read. Delay error for next iteration (after scan).
|
||||
n, err := dec.r.Read(dec.buf[len(dec.buf):cap(dec.buf)])
|
||||
dec.buf = dec.buf[0 : len(dec.buf)+n]
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func nonSpace(b []byte) bool {
|
||||
for _, c := range b {
|
||||
if !isSpace(c) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// An Encoder writes JSON values to an output stream.
|
||||
type Encoder struct {
|
||||
w io.Writer
|
||||
err error
|
||||
escapeHTML bool
|
||||
|
||||
indentBuf []byte
|
||||
indentPrefix string
|
||||
indentValue string
|
||||
}
|
||||
|
||||
// NewEncoder returns a new encoder that writes to w.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{w: w, escapeHTML: true}
|
||||
}
|
||||
|
||||
// Encode writes the JSON encoding of v to the stream,
|
||||
// followed by a newline character.
|
||||
//
|
||||
// See the documentation for Marshal for details about the
|
||||
// conversion of Go values to JSON.
|
||||
func (enc *Encoder) Encode(v any) error {
|
||||
if enc.err != nil {
|
||||
return enc.err
|
||||
}
|
||||
|
||||
e := newEncodeState()
|
||||
defer encodeStatePool.Put(e)
|
||||
|
||||
err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Terminate each value with a newline.
|
||||
// This makes the output look a little nicer
|
||||
// when debugging, and some kind of space
|
||||
// is required if the encoded value was a number,
|
||||
// so that the reader knows there aren't more
|
||||
// digits coming.
|
||||
e.WriteByte('\n')
|
||||
|
||||
b := e.Bytes()
|
||||
if enc.indentPrefix != "" || enc.indentValue != "" {
|
||||
enc.indentBuf, err = appendIndent(enc.indentBuf[:0], b, enc.indentPrefix, enc.indentValue)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
b = enc.indentBuf
|
||||
}
|
||||
if _, err = enc.w.Write(b); err != nil {
|
||||
enc.err = err
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// SetIndent instructs the encoder to format each subsequent encoded
|
||||
// value as if indented by the package-level function Indent(dst, src, prefix, indent).
|
||||
// Calling SetIndent("", "") disables indentation.
|
||||
func (enc *Encoder) SetIndent(prefix, indent string) {
|
||||
enc.indentPrefix = prefix
|
||||
enc.indentValue = indent
|
||||
}
|
||||
|
||||
// SetEscapeHTML specifies whether problematic HTML characters
|
||||
// should be escaped inside JSON quoted strings.
|
||||
// The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e
|
||||
// to avoid certain safety problems that can arise when embedding JSON in HTML.
|
||||
//
|
||||
// In non-HTML settings where the escaping interferes with the readability
|
||||
// of the output, SetEscapeHTML(false) disables this behavior.
|
||||
func (enc *Encoder) SetEscapeHTML(on bool) {
|
||||
enc.escapeHTML = on
|
||||
}
|
||||
|
||||
// RawMessage is a raw encoded JSON value.
|
||||
// It implements Marshaler and Unmarshaler and can
|
||||
// be used to delay JSON decoding or precompute a JSON encoding.
|
||||
type RawMessage []byte
|
||||
|
||||
// MarshalJSON returns m as the JSON encoding of m.
|
||||
func (m RawMessage) MarshalJSON() ([]byte, error) {
|
||||
if m == nil {
|
||||
return []byte("null"), nil
|
||||
}
|
||||
return m, nil
|
||||
}
|
||||
|
||||
// UnmarshalJSON sets *m to a copy of data.
|
||||
func (m *RawMessage) UnmarshalJSON(data []byte) error {
|
||||
if m == nil {
|
||||
return errors.New("json.RawMessage: UnmarshalJSON on nil pointer")
|
||||
}
|
||||
*m = append((*m)[0:0], data...)
|
||||
return nil
|
||||
}
|
||||
|
||||
var (
|
||||
_ Marshaler = (*RawMessage)(nil)
|
||||
_ Unmarshaler = (*RawMessage)(nil)
|
||||
)
|
||||
|
||||
// A Token holds a value of one of these types:
|
||||
//
|
||||
// Delim, for the four JSON delimiters [ ] { }
|
||||
// bool, for JSON booleans
|
||||
// float64, for JSON numbers
|
||||
// Number, for JSON numbers
|
||||
// string, for JSON string literals
|
||||
// nil, for JSON null
|
||||
type Token any
|
||||
|
||||
const (
|
||||
tokenTopValue = iota
|
||||
tokenArrayStart
|
||||
tokenArrayValue
|
||||
tokenArrayComma
|
||||
tokenObjectStart
|
||||
tokenObjectKey
|
||||
tokenObjectColon
|
||||
tokenObjectValue
|
||||
tokenObjectComma
|
||||
)
|
||||
|
||||
// advance tokenstate from a separator state to a value state
|
||||
func (dec *Decoder) tokenPrepareForDecode() error {
|
||||
// Note: Not calling peek before switch, to avoid
|
||||
// putting peek into the standard Decode path.
|
||||
// peek is only called when using the Token API.
|
||||
switch dec.tokenState {
|
||||
case tokenArrayComma:
|
||||
c, err := dec.peek()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c != ',' {
|
||||
return &SyntaxError{"expected comma after array element", dec.InputOffset()}
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenState = tokenArrayValue
|
||||
case tokenObjectColon:
|
||||
c, err := dec.peek()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if c != ':' {
|
||||
return &SyntaxError{"expected colon after object key", dec.InputOffset()}
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenState = tokenObjectValue
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dec *Decoder) tokenValueAllowed() bool {
|
||||
switch dec.tokenState {
|
||||
case tokenTopValue, tokenArrayStart, tokenArrayValue, tokenObjectValue:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (dec *Decoder) tokenValueEnd() {
|
||||
switch dec.tokenState {
|
||||
case tokenArrayStart, tokenArrayValue:
|
||||
dec.tokenState = tokenArrayComma
|
||||
case tokenObjectValue:
|
||||
dec.tokenState = tokenObjectComma
|
||||
}
|
||||
}
|
||||
|
||||
// A Delim is a JSON array or object delimiter, one of [ ] { or }.
|
||||
type Delim rune
|
||||
|
||||
func (d Delim) String() string {
|
||||
return string(d)
|
||||
}
|
||||
|
||||
// Token returns the next JSON token in the input stream.
|
||||
// At the end of the input stream, Token returns nil, io.EOF.
|
||||
//
|
||||
// Token guarantees that the delimiters [ ] { } it returns are
|
||||
// properly nested and matched: if Token encounters an unexpected
|
||||
// delimiter in the input, it will return an error.
|
||||
//
|
||||
// The input stream consists of basic JSON values—bool, string,
|
||||
// number, and null—along with delimiters [ ] { } of type Delim
|
||||
// to mark the start and end of arrays and objects.
|
||||
// Commas and colons are elided.
|
||||
func (dec *Decoder) Token() (Token, error) {
|
||||
for {
|
||||
c, err := dec.peek()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch c {
|
||||
case '[':
|
||||
if !dec.tokenValueAllowed() {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenStack = append(dec.tokenStack, dec.tokenState)
|
||||
dec.tokenState = tokenArrayStart
|
||||
return Delim('['), nil
|
||||
|
||||
case ']':
|
||||
if dec.tokenState != tokenArrayStart && dec.tokenState != tokenArrayComma {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
|
||||
dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
|
||||
dec.tokenValueEnd()
|
||||
return Delim(']'), nil
|
||||
|
||||
case '{':
|
||||
if !dec.tokenValueAllowed() {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenStack = append(dec.tokenStack, dec.tokenState)
|
||||
dec.tokenState = tokenObjectStart
|
||||
return Delim('{'), nil
|
||||
|
||||
case '}':
|
||||
if dec.tokenState != tokenObjectStart && dec.tokenState != tokenObjectComma {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
|
||||
dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
|
||||
dec.tokenValueEnd()
|
||||
return Delim('}'), nil
|
||||
|
||||
case ':':
|
||||
if dec.tokenState != tokenObjectColon {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
dec.scanp++
|
||||
dec.tokenState = tokenObjectValue
|
||||
continue
|
||||
|
||||
case ',':
|
||||
if dec.tokenState == tokenArrayComma {
|
||||
dec.scanp++
|
||||
dec.tokenState = tokenArrayValue
|
||||
continue
|
||||
}
|
||||
if dec.tokenState == tokenObjectComma {
|
||||
dec.scanp++
|
||||
dec.tokenState = tokenObjectKey
|
||||
continue
|
||||
}
|
||||
return dec.tokenError(c)
|
||||
|
||||
case '"':
|
||||
if dec.tokenState == tokenObjectStart || dec.tokenState == tokenObjectKey {
|
||||
var x string
|
||||
old := dec.tokenState
|
||||
dec.tokenState = tokenTopValue
|
||||
err := dec.Decode(&x)
|
||||
dec.tokenState = old
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
dec.tokenState = tokenObjectColon
|
||||
return x, nil
|
||||
}
|
||||
fallthrough
|
||||
|
||||
default:
|
||||
if !dec.tokenValueAllowed() {
|
||||
return dec.tokenError(c)
|
||||
}
|
||||
var x any
|
||||
if err := dec.Decode(&x); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return x, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (dec *Decoder) tokenError(c byte) (Token, error) {
|
||||
var context string
|
||||
switch dec.tokenState {
|
||||
case tokenTopValue:
|
||||
context = " looking for beginning of value"
|
||||
case tokenArrayStart, tokenArrayValue, tokenObjectValue:
|
||||
context = " looking for beginning of value"
|
||||
case tokenArrayComma:
|
||||
context = " after array element"
|
||||
case tokenObjectKey:
|
||||
context = " looking for beginning of object key string"
|
||||
case tokenObjectColon:
|
||||
context = " after object key"
|
||||
case tokenObjectComma:
|
||||
context = " after object key:value pair"
|
||||
}
|
||||
return nil, &SyntaxError{"invalid character " + quoteChar(c) + context, dec.InputOffset()}
|
||||
}
|
||||
|
||||
// More reports whether there is another element in the
|
||||
// current array or object being parsed.
|
||||
func (dec *Decoder) More() bool {
|
||||
c, err := dec.peek()
|
||||
return err == nil && c != ']' && c != '}'
|
||||
}
|
||||
|
||||
func (dec *Decoder) peek() (byte, error) {
|
||||
var err error
|
||||
for {
|
||||
for i := dec.scanp; i < len(dec.buf); i++ {
|
||||
c := dec.buf[i]
|
||||
if isSpace(c) {
|
||||
continue
|
||||
}
|
||||
dec.scanp = i
|
||||
return c, nil
|
||||
}
|
||||
// buffer has been scanned, now report any error
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
err = dec.refill()
|
||||
}
|
||||
}
|
||||
|
||||
// InputOffset returns the input stream byte offset of the current decoder position.
|
||||
// The offset gives the location of the end of the most recently returned token
|
||||
// and the beginning of the next token.
|
||||
func (dec *Decoder) InputOffset() int64 {
|
||||
return dec.scanned + int64(dec.scanp)
|
||||
}
|
218 common/contextjson/tables.go Normal file
@ -0,0 +1,218 @@
|
||||
// Copyright 2016 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package json
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// safeSet holds the value true if the ASCII character with the given array
|
||||
// position can be represented inside a JSON string without any further
|
||||
// escaping.
|
||||
//
|
||||
// All values are true except for the ASCII control characters (0-31), the
|
||||
// double quote ("), and the backslash character ("\").
|
||||
var safeSet = [utf8.RuneSelf]bool{
|
||||
' ': true,
|
||||
'!': true,
|
||||
'"': false,
|
||||
'#': true,
|
||||
'$': true,
|
||||
'%': true,
|
||||
'&': true,
|
||||
'\'': true,
|
||||
'(': true,
|
||||
')': true,
|
||||
'*': true,
|
||||
'+': true,
|
||||
',': true,
|
||||
'-': true,
|
||||
'.': true,
|
||||
'/': true,
|
||||
'0': true,
|
||||
'1': true,
|
||||
'2': true,
|
||||
'3': true,
|
||||
'4': true,
|
||||
'5': true,
|
||||
'6': true,
|
||||
'7': true,
|
||||
'8': true,
|
||||
'9': true,
|
||||
':': true,
|
||||
';': true,
|
||||
'<': true,
|
||||
'=': true,
|
||||
'>': true,
|
||||
'?': true,
|
||||
'@': true,
|
||||
'A': true,
|
||||
'B': true,
|
||||
'C': true,
|
||||
'D': true,
|
||||
'E': true,
|
||||
'F': true,
|
||||
'G': true,
|
||||
'H': true,
|
||||
'I': true,
|
||||
'J': true,
|
||||
'K': true,
|
||||
'L': true,
|
||||
'M': true,
|
||||
'N': true,
|
||||
'O': true,
|
||||
'P': true,
|
||||
'Q': true,
|
||||
'R': true,
|
||||
'S': true,
|
||||
'T': true,
|
||||
'U': true,
|
||||
'V': true,
|
||||
'W': true,
|
||||
'X': true,
|
||||
'Y': true,
|
||||
'Z': true,
|
||||
'[': true,
|
||||
'\\': false,
|
||||
']': true,
|
||||
'^': true,
|
||||
'_': true,
|
||||
'`': true,
|
||||
'a': true,
|
||||
'b': true,
|
||||
'c': true,
|
||||
'd': true,
|
||||
'e': true,
|
||||
'f': true,
|
||||
'g': true,
|
||||
'h': true,
|
||||
'i': true,
|
||||
'j': true,
|
||||
'k': true,
|
||||
'l': true,
|
||||
'm': true,
|
||||
'n': true,
|
||||
'o': true,
|
||||
'p': true,
|
||||
'q': true,
|
||||
'r': true,
|
||||
's': true,
|
||||
't': true,
|
||||
'u': true,
|
||||
'v': true,
|
||||
'w': true,
|
||||
'x': true,
|
||||
'y': true,
|
||||
'z': true,
|
||||
'{': true,
|
||||
'|': true,
|
||||
'}': true,
|
||||
'~': true,
|
||||
'\u007f': true,
|
||||
}
|
||||
|
||||
// htmlSafeSet holds the value true if the ASCII character with the given
|
||||
// array position can be safely represented inside a JSON string, embedded
|
||||
// inside of HTML <script> tags, without any additional escaping.
|
||||
//
|
||||
// All values are true except for the ASCII control characters (0-31), the
|
||||
// double quote ("), the backslash character ("\"), HTML opening and closing
|
||||
// tags ("<" and ">"), and the ampersand ("&").
|
||||
var htmlSafeSet = [utf8.RuneSelf]bool{
|
||||
' ': true,
|
||||
'!': true,
|
||||
'"': false,
|
||||
'#': true,
|
||||
'$': true,
|
||||
'%': true,
|
||||
'&': false,
|
||||
'\'': true,
|
||||
'(': true,
|
||||
')': true,
|
||||
'*': true,
|
||||
'+': true,
|
||||
',': true,
|
||||
'-': true,
|
||||
'.': true,
|
||||
'/': true,
|
||||
'0': true,
|
||||
'1': true,
|
||||
'2': true,
|
||||
'3': true,
|
||||
'4': true,
|
||||
'5': true,
|
||||
'6': true,
|
||||
'7': true,
|
||||
'8': true,
|
||||
'9': true,
|
||||
':': true,
|
||||
';': true,
|
||||
'<': false,
|
||||
'=': true,
|
||||
'>': false,
|
||||
'?': true,
|
||||
'@': true,
|
||||
'A': true,
|
||||
'B': true,
|
||||
'C': true,
|
||||
'D': true,
|
||||
'E': true,
|
||||
'F': true,
|
||||
'G': true,
|
||||
'H': true,
|
||||
'I': true,
|
||||
'J': true,
|
||||
'K': true,
|
||||
'L': true,
|
||||
'M': true,
|
||||
'N': true,
|
||||
'O': true,
|
||||
'P': true,
|
||||
'Q': true,
|
||||
'R': true,
|
||||
'S': true,
|
||||
'T': true,
|
||||
'U': true,
|
||||
'V': true,
|
||||
'W': true,
|
||||
'X': true,
|
||||
'Y': true,
|
||||
'Z': true,
|
||||
'[': true,
|
||||
'\\': false,
|
||||
']': true,
|
||||
'^': true,
|
||||
'_': true,
|
||||
'`': true,
|
||||
'a': true,
|
||||
'b': true,
|
||||
'c': true,
|
||||
'd': true,
|
||||
'e': true,
|
||||
'f': true,
|
||||
'g': true,
|
||||
'h': true,
|
||||
'i': true,
|
||||
'j': true,
|
||||
'k': true,
|
||||
'l': true,
|
||||
'm': true,
|
||||
'n': true,
|
||||
'o': true,
|
||||
'p': true,
|
||||
'q': true,
|
||||
'r': true,
|
||||
's': true,
|
||||
't': true,
|
||||
'u': true,
|
||||
'v': true,
|
||||
'w': true,
|
||||
'x': true,
|
||||
'y': true,
|
||||
'z': true,
|
||||
'{': true,
|
||||
'|': true,
|
||||
'}': true,
|
||||
'~': true,
|
||||
'\u007f': true,
|
||||
}
|
38 common/contextjson/tags.go Normal file

@@ -0,0 +1,38 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package json

import (
	"strings"
)

// tagOptions is the string following a comma in a struct field's "json"
// tag, or the empty string. It does not include the leading comma.
type tagOptions string

// parseTag splits a struct field's json tag into its name and
// comma-separated options.
func parseTag(tag string) (string, tagOptions) {
	tag, opt, _ := strings.Cut(tag, ",")
	return tag, tagOptions(opt)
}

// Contains reports whether a comma-separated list of options
// contains a particular substr flag. substr must be surrounded by a
// string boundary or commas.
func (o tagOptions) Contains(optionName string) bool {
	if len(o) == 0 {
		return false
	}
	s := string(o)
	for s != "" {
		var name string
		name, s, _ = strings.Cut(s, ",")
		if name == optionName {
			return true
		}
	}
	return false
}
@@ -6,7 +6,6 @@ import (
	"sync"

	"github.com/sagernet/sing-box/adapter"
	"github.com/sagernet/sing/common/bufio/deadline"
	E "github.com/sagernet/sing/common/exceptions"
	M "github.com/sagernet/sing/common/metadata"
	N "github.com/sagernet/sing/common/network"

@@ -45,14 +44,7 @@ func (d *DetourDialer) DialContext(ctx context.Context, network string, destinat
	if err != nil {
		return nil, err
	}
	conn, err := dialer.DialContext(ctx, network, destination)
	if err != nil {
		return nil, err
	}
	if deadline.NeedAdditionalReadDeadline(conn) {
		conn = deadline.NewConn(conn)
	}
	return conn, nil
	return dialer.DialContext(ctx, network, destination)
}

func (d *DetourDialer) ListenPacket(ctx context.Context, destination M.Socksaddr) (net.PacketConn, error) {
21 common/json/context.go Normal file

@@ -0,0 +1,21 @@
//go:build go1.21 && !without_contextjson

package json

import "github.com/sagernet/sing-box/common/contextjson"

var (
	Marshal    = json.Marshal
	Unmarshal  = json.Unmarshal
	NewEncoder = json.NewEncoder
	NewDecoder = json.NewDecoder
)

type (
	Encoder     = json.Encoder
	Decoder     = json.Decoder
	Token       = json.Token
	Delim       = json.Delim
	SyntaxError = json.SyntaxError
	RawMessage  = json.RawMessage
)
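A sketch of the intended call pattern (assumption; option.Options stands in for whatever structure is being decoded): callers import the common/json wrapper rather than either implementation directly, so the build tags decide which decoder is used.

package example

import (
	"github.com/sagernet/sing-box/common/json"
	"github.com/sagernet/sing-box/option"
)

// parseConfig decodes a configuration through the wrapper: with Go 1.21+ and
// default build tags this resolves to contextjson, otherwise to encoding/json.
func parseConfig(raw []byte) (option.Options, error) {
	var options option.Options
	err := json.Unmarshal(raw, &options)
	return options, err
}

Building with -tags without_contextjson (or with a pre-1.21 toolchain) selects the plain encoding/json variant declared in the next hunk.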
@@ -1,3 +1,5 @@
//go:build !go1.21 || without_contextjson

package json

import "encoding/json"

@@ -15,4 +17,5 @@ type (
	Token       = json.Token
	Delim       = json.Delim
	SyntaxError = json.SyntaxError
	RawMessage  = json.RawMessage
)
@@ -4,6 +4,34 @@ icon: material/alert-decagram

# ChangeLog

#### 1.8.0-alpha.8

* Add context to JSON decode error message **1**
* Fixes and improvements

**1**:

JSON parse errors will now include the current key path.
Only takes effect when compiled with Go 1.21+.

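Illustratively (assumed example, not taken from this release note): with the key path attached, a type error under the second route rule is reported as `route.rules[1].outbound: <decode error>` rather than as the bare decode error, following the `key[index].key` path format built by contextjson's formatContext.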
#### 1.8.0-alpha.7

* Fixes and improvements

#### 1.7.1

* Fixes and improvements

#### 1.8.0-alpha.6

* Fix rule-set matching logic **1**
* Fixes and improvements

**1**:

Rules contained in a referenced `rule_set` are now logically merged into the rule that uses them,
rather than the whole rule strictly following AND logic.

#### 1.8.0-alpha.5

* Parallel rule-set initialization
@ -3,9 +3,9 @@ package libbox
import (
"bytes"
"context"
"encoding/json"

"github.com/sagernet/sing-box"
"github.com/sagernet/sing-box/common/json"
"github.com/sagernet/sing-box/option"
E "github.com/sagernet/sing/common/exceptions"
)
2 go.mod
@ -26,7 +26,7 @@ require (
github.com/sagernet/gvisor v0.0.0-20231119034329-07cfb6aaf930
github.com/sagernet/quic-go v0.40.0
github.com/sagernet/reality v0.0.0-20230406110435-ee17307e7691
github.com/sagernet/sing v0.2.18-0.20231130092223-1f82310f0375
github.com/sagernet/sing v0.2.18-0.20231201060417-575186ed63c2
github.com/sagernet/sing-dns v0.1.11
github.com/sagernet/sing-mux v0.1.5-0.20231109075101-6b086ed6bb07
github.com/sagernet/sing-quic v0.1.5-0.20231123150216-00957d136203
3 go.sum
@ -110,8 +110,7 @@ github.com/sagernet/reality v0.0.0-20230406110435-ee17307e7691 h1:5Th31OC6yj8byL
github.com/sagernet/reality v0.0.0-20230406110435-ee17307e7691/go.mod h1:B8lp4WkQ1PwNnrVMM6KyuFR20pU8jYBD+A4EhJovEXU=
github.com/sagernet/sing v0.0.0-20220817130738-ce854cda8522/go.mod h1:QVsS5L/ZA2Q5UhQwLrn0Trw+msNd/NPGEhBKR/ioWiY=
github.com/sagernet/sing v0.1.8/go.mod h1:jt1w2u7lJQFFSGLiRrRIs5YWmx4kAPfWuOejuDW9qMk=
github.com/sagernet/sing v0.2.18-0.20231130092223-1f82310f0375 h1:5Q5K/twBNT1Hrpjd5Ghft0Sv0V+eVfTZX17CiPItSV8=
github.com/sagernet/sing v0.2.18-0.20231130092223-1f82310f0375/go.mod h1:OL6k2F0vHmEzXz2KW19qQzu172FDgSbUSODylighuVo=
github.com/sagernet/sing v0.2.18-0.20231201060417-575186ed63c2 h1:1ydWkFgLURGlrnwRdjyrpo9lp1g5Qq7XrNBghMntWTs=
github.com/sagernet/sing-dns v0.1.11 h1:PPrMCVVrAeR3f5X23I+cmvacXJ+kzuyAsBiWyUKhGSE=
github.com/sagernet/sing-dns v0.1.11/go.mod h1:zJ/YjnYB61SYE+ubMcMqVdpaSvsyQ2iShQGO3vuLvvE=
github.com/sagernet/sing-mux v0.1.5-0.20231109075101-6b086ed6bb07 h1:ncKb5tVOsCQgCsv6UpsA0jinbNb5OQ5GMPJlyQP3EHM=
@ -1,9 +1,8 @@
package option

import (
"encoding/json"

"github.com/sagernet/sing-box/common/humanize"
"github.com/sagernet/sing-box/common/json"
)

type DebugOptions struct {
@ -114,7 +114,7 @@ func (h *Inbound) UnmarshalJSON(bytes []byte) error {
}
err = UnmarshallExcluded(bytes, (*_Inbound)(h), v)
if err != nil {
return E.Cause(err, "inbound options")
return err
}
return nil
}
@ -124,7 +124,7 @@ func (h *Outbound) UnmarshalJSON(bytes []byte) error {
}
err = UnmarshallExcluded(bytes, (*_Outbound)(h), v)
if err != nil {
return E.Cause(err, "outbound options")
return err
}
return nil
}
@ -48,7 +48,7 @@ func (r *Rule) UnmarshalJSON(bytes []byte) error {
}
err = UnmarshallExcluded(bytes, (*_Rule)(r), v)
if err != nil {
return E.Cause(err, "route rule")
return err
}
return nil
}
@ -48,7 +48,7 @@ func (r *DNSRule) UnmarshalJSON(bytes []byte) error {
}
err = UnmarshallExcluded(bytes, (*_DNSRule)(r), v)
if err != nil {
return E.Cause(err, "dns route rule")
return err
}
return nil
}
@ -64,7 +64,7 @@ func (r *RuleSet) UnmarshalJSON(bytes []byte) error {
}
err = UnmarshallExcluded(bytes, (*_RuleSet)(r), v)
if err != nil {
return E.Cause(err, "rule set")
return err
}
return nil
}
@ -118,7 +118,7 @@ func (r *HeadlessRule) UnmarshalJSON(bytes []byte) error {
}
err = UnmarshallExcluded(bytes, (*_HeadlessRule)(r), v)
if err != nil {
return E.Cause(err, "route rule-set rule")
return err
}
return nil
}
@ -209,7 +209,7 @@ func (r *PlainRuleSetCompat) UnmarshalJSON(bytes []byte) error {
}
err = UnmarshallExcluded(bytes, (*_PlainRuleSetCompat)(r), v)
if err != nil {
return E.Cause(err, "rule set")
return err
}
return nil
}
226 option/time_unit.go Normal file
@ -0,0 +1,226 @@
package option

import (
"errors"
"time"
)

// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

const durationDay = 24 * time.Hour

var unitMap = map[string]uint64{
"ns": uint64(time.Nanosecond),
"us": uint64(time.Microsecond),
"µs": uint64(time.Microsecond), // U+00B5 = micro symbol
"μs": uint64(time.Microsecond), // U+03BC = Greek letter mu
"ms": uint64(time.Millisecond),
"s": uint64(time.Second),
"m": uint64(time.Minute),
"h": uint64(time.Hour),
"d": uint64(durationDay),
}
// ParseDuration parses a duration string.
// A duration string is a possibly signed sequence of
// decimal numbers, each with optional fraction and a unit suffix,
// such as "300ms", "-1.5h" or "2h45m".
// Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h" and "d".
func ParseDuration(s string) (Duration, error) {
// [-+]?([0-9]*(\.[0-9]*)?[a-z]+)+
orig := s
var d uint64
neg := false

// Consume [-+]?
if s != "" {
c := s[0]
if c == '-' || c == '+' {
neg = c == '-'
s = s[1:]
}
}
// Special case: if all that is left is "0", this is zero.
if s == "0" {
return 0, nil
}
if s == "" {
return 0, errors.New("time: invalid duration " + quote(orig))
}
for s != "" {
var (
v, f uint64 // integers before, after decimal point
scale float64 = 1 // value = v + f/scale
)

var err error

// The next character must be [0-9.]
if !(s[0] == '.' || '0' <= s[0] && s[0] <= '9') {
return 0, errors.New("time: invalid duration " + quote(orig))
}
// Consume [0-9]*
pl := len(s)
v, s, err = leadingInt(s)
if err != nil {
return 0, errors.New("time: invalid duration " + quote(orig))
}
pre := pl != len(s) // whether we consumed anything before a period

// Consume (\.[0-9]*)?
post := false
if s != "" && s[0] == '.' {
s = s[1:]
pl := len(s)
f, scale, s = leadingFraction(s)
post = pl != len(s)
}
if !pre && !post {
// no digits (e.g. ".s" or "-.s")
return 0, errors.New("time: invalid duration " + quote(orig))
}

// Consume unit.
i := 0
for ; i < len(s); i++ {
c := s[i]
if c == '.' || '0' <= c && c <= '9' {
break
}
}
if i == 0 {
return 0, errors.New("time: missing unit in duration " + quote(orig))
}
u := s[:i]
s = s[i:]
unit, ok := unitMap[u]
if !ok {
return 0, errors.New("time: unknown unit " + quote(u) + " in duration " + quote(orig))
}
if v > 1<<63/unit {
// overflow
return 0, errors.New("time: invalid duration " + quote(orig))
}
v *= unit
if f > 0 {
// float64 is needed to be nanosecond accurate for fractions of hours.
// v >= 0 && (f*unit/scale) <= 3.6e+12 (ns/h, h is the largest unit)
v += uint64(float64(f) * (float64(unit) / scale))
if v > 1<<63 {
// overflow
return 0, errors.New("time: invalid duration " + quote(orig))
}
}
d += v
if d > 1<<63 {
return 0, errors.New("time: invalid duration " + quote(orig))
}
}
if neg {
return -Duration(d), nil
}
if d > 1<<63-1 {
return 0, errors.New("time: invalid duration " + quote(orig))
}
return Duration(d), nil
}
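A short usage sketch (illustrative values only): the parser behaves like time.ParseDuration but additionally accepts the "d" day unit, returning the result as the option package's Duration type.

package main

import (
	"fmt"
	"time"

	"github.com/sagernet/sing-box/option"
)

func main() {
	// "1d12h" is 36 hours; plain time.ParseDuration would reject the "d" unit.
	duration, err := option.ParseDuration("1d12h")
	if err != nil {
		panic(err)
	}
	fmt.Println(time.Duration(duration)) // 36h0m0s
}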
var errLeadingInt = errors.New("time: bad [0-9]*") // never printed

// leadingInt consumes the leading [0-9]* from s.
func leadingInt[bytes []byte | string](s bytes) (x uint64, rem bytes, err error) {
i := 0
for ; i < len(s); i++ {
c := s[i]
if c < '0' || c > '9' {
break
}
if x > 1<<63/10 {
// overflow
return 0, rem, errLeadingInt
}
x = x*10 + uint64(c) - '0'
if x > 1<<63 {
// overflow
return 0, rem, errLeadingInt
}
}
return x, s[i:], nil
}

// leadingFraction consumes the leading [0-9]* from s.
// It is used only for fractions, so does not return an error on overflow,
// it just stops accumulating precision.
func leadingFraction(s string) (x uint64, scale float64, rem string) {
i := 0
scale = 1
overflow := false
for ; i < len(s); i++ {
c := s[i]
if c < '0' || c > '9' {
break
}
if overflow {
continue
}
if x > (1<<63-1)/10 {
// It's possible for overflow to give a positive number, so take care.
overflow = true
continue
}
y := x*10 + uint64(c) - '0'
if y > 1<<63 {
overflow = true
continue
}
x = y
scale *= 10
}
return x, scale, s[i:]
}

// These are borrowed from unicode/utf8 and strconv and replicate behavior in
// that package, since we can't take a dependency on either.
const (
lowerhex = "0123456789abcdef"
runeSelf = 0x80
runeError = '\uFFFD'
)

func quote(s string) string {
buf := make([]byte, 1, len(s)+2) // slice will be at least len(s) + quotes
buf[0] = '"'
for i, c := range s {
if c >= runeSelf || c < ' ' {
// This means you are asking us to parse a time.Duration or
// time.Location with unprintable or non-ASCII characters in it.
// We don't expect to hit this case very often. We could try to
// reproduce strconv.Quote's behavior with full fidelity but
// given how rarely we expect to hit these edge cases, speed and
// conciseness are better.
var width int
if c == runeError {
width = 1
if i+2 < len(s) && s[i:i+3] == string(runeError) {
width = 3
}
} else {
width = len(string(c))
}
for j := 0; j < width; j++ {
buf = append(buf, `\x`...)
buf = append(buf, lowerhex[s[i+j]>>4])
buf = append(buf, lowerhex[s[i+j]&0xF])
}
} else {
if c == '"' || c == '\\' {
buf = append(buf, '\\')
}
buf = append(buf, string(c)...)
}
}
buf = append(buf, '"')
return string(buf)
}
@ -62,7 +62,7 @@ func (o *ACMEDNS01ChallengeOptions) UnmarshalJSON(bytes []byte) error {
}
err = UnmarshallExcluded(bytes, (*_ACMEDNS01ChallengeOptions)(o), v)
if err != nil {
return E.Cause(err, "DNS01 challenge options")
return err
}
return nil
}
@ -164,7 +164,7 @@ func (d *Duration) UnmarshalJSON(bytes []byte) error {
if err != nil {
return err
}
duration, err := time.ParseDuration(value)
duration, err := ParseDuration(value)
if err != nil {
return err
}
@ -60,7 +60,7 @@ func (o *V2RayTransportOptions) UnmarshalJSON(bytes []byte) error {
}
err = UnmarshallExcluded(bytes, (*_V2RayTransportOptions)(o), v)
if err != nil {
return E.Cause(err, "vmess transport options")
return err
}
return nil
}
@ -194,9 +194,7 @@ func (d *DNS) newPacketConnection(ctx context.Context, conn N.PacketConn, readWa
group.Append0(func(ctx context.Context) error {
var buffer *buf.Buffer
readWaiter.InitializeReadWaiter(func() *buf.Buffer {
buffer = buf.NewSize(dns.FixedPacketSize)
buffer.FullReset()
return buffer
return buf.NewSize(dns.FixedPacketSize)
})
defer readWaiter.InitializeReadWaiter(nil)
for {
@ -545,6 +545,18 @@ func (r *Router) Start() error {
return nil
}
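// PostStart forwards the post-start signal to every configured rule-set once
// all services are up; remote rule-sets use it for work that is better deferred
// until after startup, such as their initial download (see RemoteRuleSet.PostStart).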
func (r *Router) PostStart() error {
if len(r.ruleSets) > 0 {
for i, ruleSet := range r.ruleSets {
err := ruleSet.PostStart()
if err != nil {
return E.Cause(err, "post start rule-set[", i, "]")
}
}
}
return nil
}

func (r *Router) Close() error {
var err error
for i, rule := range r.rules {
@ -690,7 +702,6 @@ func (r *Router) RouteConnection(ctx context.Context, conn net.Conn, metadata ad

if metadata.InboundOptions.SniffEnabled {
buffer := buf.NewPacket()
buffer.FullReset()
sniffMetadata, err := sniff.PeekStream(ctx, conn, buffer, time.Duration(metadata.InboundOptions.SniffTimeout), sniff.StreamDomainNameQuery, sniff.TLSClientHello, sniff.HTTPHost)
if sniffMetadata != nil {
metadata.Protocol = sniffMetadata.Protocol
@ -806,7 +817,6 @@ func (r *Router) RoutePacketConnection(ctx context.Context, conn N.PacketConn, m

if metadata.InboundOptions.SniffEnabled || metadata.Destination.Addr.IsUnspecified() {
buffer := buf.NewPacket()
buffer.FullReset()
destination, err := conn.ReadPacket(buffer)
if err != nil {
buffer.Release()
@ -922,6 +932,7 @@ func (r *Router) match0(ctx context.Context, metadata *adapter.InboundContext, d
}
}
for i, rule := range r.rules {
metadata.ResetRuleCache()
if rule.Match(metadata) {
detour := rule.Outbound()
r.logger.DebugContext(ctx, "match[", i, "] ", rule.String(), " => ", detour)
@ -43,6 +43,7 @@ func (r *Router) matchDNS(ctx context.Context) (context.Context, dns.Transport,
panic("no context")
}
for i, rule := range r.dnsRules {
metadata.ResetRuleCache()
if rule.Match(metadata) {
detour := rule.Outbound()
transport, loaded := r.transportMap[detour]
@ -17,6 +17,7 @@ type abstractDefaultRule struct {
destinationAddressItems []RuleItem
destinationPortItems []RuleItem
allItems []RuleItem
ruleSetItem RuleItem
invert bool
outbound string
}
@ -62,63 +63,63 @@ func (r *abstractDefaultRule) Match(metadata *adapter.InboundContext) bool {
return true
}
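// The per-category loops below record whether any source/destination address
// or port item matched and cache the result on the metadata (cleared via
// ResetRuleCache), so that conditions contributed by a referenced rule_set are
// effectively merged with the rule's own items instead of being AND-ed separately.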
if len(r.sourceAddressItems) > 0 && !metadata.SourceAddressMatch {
for _, item := range r.sourceAddressItems {
if item.Match(metadata) {
metadata.SourceAddressMatch = true
break
}
}
}

if len(r.sourcePortItems) > 0 && !metadata.SourcePortMatch {
for _, item := range r.sourcePortItems {
if item.Match(metadata) {
metadata.SourcePortMatch = true
break
}
}
}

if len(r.destinationAddressItems) > 0 && !metadata.DestinationAddressMatch {
for _, item := range r.destinationAddressItems {
if item.Match(metadata) {
metadata.DestinationAddressMatch = true
break
}
}
}

if len(r.destinationPortItems) > 0 && !metadata.DestinationPortMatch {
for _, item := range r.destinationPortItems {
if item.Match(metadata) {
metadata.DestinationPortMatch = true
break
}
}
}

for _, item := range r.items {
if !item.Match(metadata) {
return r.invert
}
}

if len(r.sourceAddressItems) > 0 {
var sourceAddressMatch bool
for _, item := range r.sourceAddressItems {
if item.Match(metadata) {
sourceAddressMatch = true
break
}
}
if !sourceAddressMatch {
if len(r.sourceAddressItems) > 0 && !metadata.SourceAddressMatch {
return r.invert
}
}

if len(r.sourcePortItems) > 0 {
var sourcePortMatch bool
for _, item := range r.sourcePortItems {
if item.Match(metadata) {
sourcePortMatch = true
break
}
}
if !sourcePortMatch {
if len(r.sourcePortItems) > 0 && !metadata.SourcePortMatch {
return r.invert
}
}

if len(r.destinationAddressItems) > 0 {
var destinationAddressMatch bool
for _, item := range r.destinationAddressItems {
if item.Match(metadata) {
destinationAddressMatch = true
break
}
}
if !destinationAddressMatch {
if len(r.destinationAddressItems) > 0 && !metadata.DestinationAddressMatch {
return r.invert
}
}

if len(r.destinationPortItems) > 0 {
var destinationPortMatch bool
for _, item := range r.destinationPortItems {
if item.Match(metadata) {
destinationPortMatch = true
break
}
}
if !destinationPortMatch {
if len(r.destinationPortItems) > 0 && !metadata.DestinationPortMatch {
return r.invert
}
}

return !r.invert
}
@ -188,10 +189,12 @@ func (r *abstractLogicalRule) Close() error {
func (r *abstractLogicalRule) Match(metadata *adapter.InboundContext) bool {
if r.mode == C.LogicalTypeAnd {
return common.All(r.rules, func(it adapter.HeadlessRule) bool {
metadata.ResetRuleCache()
return it.Match(metadata)
}) != r.invert
} else {
return common.Any(r.rules, func(it adapter.HeadlessRule) bool {
metadata.ResetRuleCache()
return it.Match(metadata)
}) != r.invert
}
@ -65,6 +65,10 @@ func (s *LocalRuleSet) StartContext(ctx context.Context, startContext adapter.Ru
return nil
}

func (s *LocalRuleSet) PostStart() error {
return nil
}

func (s *LocalRuleSet) Close() error {
return nil
}
@ -18,6 +18,7 @@ import (
M "github.com/sagernet/sing/common/metadata"
N "github.com/sagernet/sing/common/network"
"github.com/sagernet/sing/service"
"github.com/sagernet/sing/service/pause"
)

var _ adapter.RuleSet = (*RemoteRuleSet)(nil)
@ -34,6 +35,7 @@ type RemoteRuleSet struct {
lastUpdated time.Time
lastEtag string
updateTicker *time.Ticker
pauseManager pause.Manager
}

func NewRemoteRuleSet(ctx context.Context, router adapter.Router, logger logger.ContextLogger, options option.RuleSet) *RemoteRuleSet {
@ -51,6 +53,7 @@ func NewRemoteRuleSet(ctx context.Context, router adapter.Router, logger logger.
logger: logger,
options: options,
updateInterval: updateInterval,
pauseManager: pause.ManagerFromContext(ctx),
}
}

@ -90,17 +93,21 @@ func (s *RemoteRuleSet) StartContext(ctx context.Context, startContext adapter.R
s.lastEtag = savedSet.LastEtag
}
}
if s.lastUpdated.IsZero() || time.Since(s.lastUpdated) > s.updateInterval {
err := s.fetchOnce(ctx, startContext)
if err != nil {
return E.Cause(err, "fetch rule-set ", s.options.Tag)
}
}
s.updateTicker = time.NewTicker(s.updateInterval)
go s.loopUpdate()
return nil
}
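// PostStart performs the initial download when no cached copy was restored in
// StartContext; failures are only logged, so an unreachable rule-set URL does
// not prevent the service from starting.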
func (s *RemoteRuleSet) PostStart() error {
if s.lastUpdated.IsZero() {
err := s.fetchOnce(s.ctx, nil)
if err != nil {
s.logger.Error("fetch rule-set ", s.options.Tag, ": ", err)
}
}
return nil
}

func (s *RemoteRuleSet) loadBytes(content []byte) error {
var (
plainRuleSet option.PlainRuleSet
@ -136,11 +143,18 @@ func (s *RemoteRuleSet) loadBytes(content []byte) error {
}

func (s *RemoteRuleSet) loopUpdate() {
if time.Since(s.lastUpdated) > s.updateInterval {
err := s.fetchOnce(s.ctx, nil)
if err != nil {
s.logger.Error("fetch rule-set ", s.options.Tag, ": ", err)
}
}
for {
select {
case <-s.ctx.Done():
return
case <-s.updateTicker.C:
s.pauseManager.WaitActive()
err := s.fetchOnce(s.ctx, nil)
if err != nil {
s.logger.Error("fetch rule-set ", s.options.Tag, ": ", err)
@ -12,6 +12,9 @@ import (
"github.com/sagernet/sing-box/common/tls"
C "github.com/sagernet/sing-box/constant"
"github.com/sagernet/sing-box/option"
"github.com/sagernet/sing/common/buf"
"github.com/sagernet/sing/common/bufio"
"github.com/sagernet/sing/common/bufio/deadline"
E "github.com/sagernet/sing/common/exceptions"
M "github.com/sagernet/sing/common/metadata"
N "github.com/sagernet/sing/common/network"
@ -87,18 +90,35 @@ func (c *Client) dialContext(ctx context.Context, requestURL *url.URL, headers h
return nil, err
}
}
conn.SetDeadline(time.Now().Add(C.TCPTimeout))
var deadlineConn net.Conn
if deadline.NeedAdditionalReadDeadline(conn) {
deadlineConn = deadline.NewConn(conn)
} else {
deadlineConn = conn
}
err = deadlineConn.SetDeadline(time.Now().Add(C.TCPTimeout))
if err != nil {
return nil, E.Cause(err, "set read deadline")
}
var protocols []string
if protocolHeader := headers.Get("Sec-WebSocket-Protocol"); protocolHeader != "" {
protocols = []string{protocolHeader}
headers.Del("Sec-WebSocket-Protocol")
}
reader, _, err := ws.Dialer{Header: ws.HandshakeHeaderHTTP(headers), Protocols: protocols}.Upgrade(conn, requestURL)
conn.SetDeadline(time.Time{})
reader, _, err := ws.Dialer{Header: ws.HandshakeHeaderHTTP(headers), Protocols: protocols}.Upgrade(deadlineConn, requestURL)
deadlineConn.SetDeadline(time.Time{})
if err != nil {
return nil, err
}
return NewConn(conn, reader, nil, ws.StateClientSide), nil
if reader != nil {
buffer := buf.NewSize(reader.Buffered())
_, err = buffer.ReadFullFrom(reader, buffer.Len())
if err != nil {
return nil, err
}
conn = bufio.NewCachedConn(conn, buffer)
}
return NewConn(conn, nil, ws.StateClientSide), nil
}

func (c *Client) DialContext(ctx context.Context) (net.Conn, error) {
@ -1,7 +1,6 @@
package v2raywebsocket

import (
"bufio"
"context"
"encoding/base64"
"io"
@ -28,19 +27,13 @@ type WebsocketConn struct {
remoteAddr net.Addr
}

func NewConn(conn net.Conn, br *bufio.Reader, remoteAddr net.Addr, state ws.State) *WebsocketConn {
func NewConn(conn net.Conn, remoteAddr net.Addr, state ws.State) *WebsocketConn {
controlHandler := wsutil.ControlFrameHandler(conn, state)
var reader io.Reader
if br != nil && br.Buffered() > 0 {
reader = br
} else {
reader = conn
}
return &WebsocketConn{
Conn: conn,
state: state,
reader: &wsutil.Reader{
Source: reader,
Source: conn,
State: state,
SkipHeaderCheck: !debug.Enabled,
OnIntermediate: controlHandler,
@ -88,14 +88,14 @@ func (s *Server) ServeHTTP(writer http.ResponseWriter, request *http.Request) {
s.invalidRequest(writer, request, http.StatusBadRequest, E.Cause(err, "decode early data"))
return
}
wsConn, reader, _, err := ws.UpgradeHTTP(request, writer)
wsConn, _, _, err := ws.UpgradeHTTP(request, writer)
if err != nil {
s.invalidRequest(writer, request, 0, E.Cause(err, "upgrade websocket connection"))
return
}
var metadata M.Metadata
metadata.Source = sHttp.SourceAddress(request)
conn = NewConn(wsConn, reader.Reader, metadata.Source.TCPAddr(), ws.StateServerSide)
conn = NewConn(wsConn, metadata.Source.TCPAddr(), ws.StateServerSide)
if len(earlyData) > 0 {
conn = bufio.NewCachedConn(conn, buf.As(earlyData))
}
@ -134,7 +134,7 @@ func (c *VisionConn) Read(p []byte) (n int, err error) {
buffers = common.Map(buffers, func(it *buf.Buffer) *buf.Buffer {
return it.ToOwned()
})
chunkBuffer.FullReset()
chunkBuffer.Reset()
}
if c.remainingContent == 0 && c.remainingPadding == 0 {
if c.currentCommand == commandPaddingEnd {