diff --git a/token.go b/token.go
index 84eb7ee..707ae58 100644
--- a/token.go
+++ b/token.go
@@ -206,6 +206,8 @@ func (t *Token) ValueUnescaped() []byte {
 				result = append(result, str[:idx]...)
 				result = append(result, p...)
 				str = str[idx+len(p)+1:]
+			} else {
+				break
 			}
 		} else {
 			break
diff --git a/tokenizer_test.go b/tokenizer_test.go
index 20e2d00..55eeadf 100644
--- a/tokenizer_test.go
+++ b/tokenizer_test.go
@@ -86,6 +86,7 @@ func TestTokenize(t *testing.T) {
 		{"два три", Token{key: TokenString, string: quote, value: []byte("\"два три\"")}},
 		{"one\" two", Token{key: TokenString, string: quote, value: []byte(`"one\" two"`)}},
 		{"", Token{key: TokenString, string: quote, value: []byte("\"\"")}},
+		{`one\' two`, Token{key: TokenString, string: quote, value: []byte(`"one\' two"`)}},
 	}
 	for _, v := range framed {
 		stream := tokenizer.ParseBytes(v.token.value)