chore: add and make use of Type.equals method
       to prevent problems with bundling that
       could otherwise produce multiple copies
       of the Type implementation.
       If there are multiple copies of Type,
       an expression like xyz.type === Type.map could fail.
mabels committed Dec 4, 2024
1 parent 1256d66 commit 938cd77
Showing 19 changed files with 130 additions and 104 deletions.
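
The equals method itself is added to the Type class in lib/token.js, whose diff is not included in this excerpt. A minimal sketch of what a value-based equals plausibly looks like, assuming Type keeps its existing major/name/terminal fields (an assumption, not the verbatim implementation):

// sketch only - assumes the shape of Type in lib/token.js
class Type {
  /**
   * @param {number} major
   * @param {string} name
   * @param {boolean} terminal
   */
  constructor (major, name, terminal) {
    this.major = major
    this.name = name
    this.terminal = terminal
  }

  /**
   * Value equality: unlike ===, this still holds between Type instances
   * coming from two separately bundled copies of token.js.
   * @param {Type} typ
   * @returns {boolean}
   */
  equals (typ) {
    return this.major === typ.major && this.name === typ.name
  }
}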
2 changes: 1 addition & 1 deletion README.md
@@ -409,7 +409,7 @@ import { decode, Tokenizer, Type } from 'cborg'
class CustomTokeniser extends Tokenizer {
next () {
const nextToken = super.next()
if (nextToken.type === Type.bytes) {
if (nextToken.type.equals(Type.bytes)) {
throw new Error('Unsupported type: bytes')
}
return nextToken
7 changes: 7 additions & 0 deletions cborg.js
@@ -2,6 +2,8 @@ import { encode } from './lib/encode.js'
import { decode, decodeFirst, Tokeniser, tokensToObject } from './lib/decode.js'
import { Token, Type } from './lib/token.js'

import * as json from './lib/json/json.js'

/**
* Export the types that were present in the original manual cborg.d.ts
* @typedef {import('./interface').TagDecoder} TagDecoder
@@ -12,6 +14,11 @@ import { Token, Type } from './lib/token.js'
*/

export {
// this is needed to prevent the bundling trouble that happens
// because token.js is used in lib/json and therefore in cborg/json,
// which after bundling ends up with two copies of token.js
// and breaks statements like token.type === Type.array
json,
decode,
decodeFirst,
Tokeniser as Tokenizer,
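
To make the failure mode concrete, here is a runnable illustration using the Type sketch from above, simulating a bundler that emitted two independent copies of token.js (the variable names are hypothetical):

const tokenJsCopy1 = { Type: { map: new Type(5, 'map', false) } } // first bundled copy
const tokenJsCopy2 = { Type: { map: new Type(5, 'map', false) } } // second bundled copy

console.log(tokenJsCopy1.Type.map === tokenJsCopy2.Type.map)     // false - identities differ
console.log(tokenJsCopy1.Type.map.equals(tokenJsCopy2.Type.map)) // true  - values agree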
2 changes: 1 addition & 1 deletion example-bytestrings.js
@@ -92,7 +92,7 @@ tags[tagUint64Array] = uint64ArrayDecoder
class ArrayBufferTransformingTokeniser extends Tokenizer {
next () {
const nextToken = super.next()
if (nextToken.type === Type.bytes) {
if (nextToken.type.equals(Type.bytes)) {
// Transform the (assumed) Uint8Array value to an ArrayBuffer of the same bytes, note though
// that all tags we care about are going to be <tag><bytes>, so we're also transforming those
// into ArrayBuffers, so our tag decoders need to also assume they are getting ArrayBuffers
2 changes: 1 addition & 1 deletion lib/2bytes.js
@@ -89,7 +89,7 @@ export function decodeBytes64 (data, pos, _minor, options)
*/
function tokenBytes (token) {
if (token.encodedBytes === undefined) {
token.encodedBytes = token.type === Type.string ? fromString(token.value) : token.value
token.encodedBytes = token.type.equals(Type.string) ? fromString(token.value) : token.value
}
// @ts-ignore c'mon
return token.encodedBytes
8 changes: 4 additions & 4 deletions lib/decode.js
@@ -145,23 +145,23 @@ function tokensToObject (tokeniser, options)

const token = tokeniser.next()

if (token.type === Type.break) {
if (token.type.equals(Type.break)) {
return BREAK
}

if (token.type.terminal) {
return token.value
}

if (token.type === Type.array) {
if (token.type.equals(Type.array)) {
return tokenToArray(token, tokeniser, options)
}

if (token.type === Type.map) {
if (token.type.equals(Type.map)) {
return tokenToMap(token, tokeniser, options)
}

if (token.type === Type.tag) {
if (token.type.equals(Type.tag)) {
if (options.tags && typeof options.tags[token.value] === 'function') {
const tagged = tokensToObject(tokeniser, options)
return options.tags[token.value](tagged)
26 changes: 14 additions & 12 deletions lib/diagnostic.js
@@ -1,4 +1,5 @@
import { Tokeniser } from './decode.js'
import { Type } from './token.js'
import { toHex, fromHex } from './byte-utils.js'
import { uintBoundaries } from './0uint.js'

@@ -31,25 +32,25 @@ function * tokensToDiagnostic (inp, width = 100)
/** @type {string|number} */
let v = String(token.value)
let outp = `${margin}${slc(0, 1)}`
const str = token.type.name === 'bytes' || token.type.name === 'string'
if (token.type.name === 'string') {
const str = token.type.equals(Type.bytes) || token.type.equals(Type.string)
if (token.type.equals(Type.string)) {
v = v.length
vLength -= v
} else if (token.type.name === 'bytes') {
} else if (token.type.equals(Type.bytes)) {
v = token.value.length
// @ts-ignore
vLength -= v
}

let multilen
switch (token.type.name) {
case 'string':
case 'bytes':
case 'map':
case 'array':
case Type.string.name:
case Type.bytes.name:
case Type.map.name:
case Type.array.name:
// for bytes and string, we want to print out the length part of the value prefix if it
// exists - it doesn't exist for short lengths (<24) but does for longer lengths
multilen = token.type.name === 'string' ? utf8Encoder.encode(token.value).length : token.value.length
multilen = token.type.equals(Type.string) ? utf8Encoder.encode(token.value).length : token.value.length
if (multilen >= uintBoundaries[0]) {
if (multilen < uintBoundaries[1]) {
outp += ` ${slc(1, 1)}`
@@ -71,13 +72,14 @@

outp = outp.padEnd(width / 2, ' ')
outp += `# ${margin}${token.type.name}`
// there should be a method to get a Type from a String
if (token.type.name !== v) {
outp += `(${v})`
}
yield outp

if (str) {
let asString = token.type.name === 'string'
let asString = token.type.equals(Type.string)
margin += ' '
let repr = asString ? utf8Encoder.encode(token.value) : token.value
if (asString && token.byteValue !== undefined) {
@@ -110,15 +112,15 @@ function * tokensToDiagnostic (inp, width = 100)
}
if (!token.type.terminal) {
switch (token.type.name) {
case 'map':
case Type.map.name:
indent.push(token.value * 2)
break
case 'array':
case Type.array.name:
indent.push(token.value)
break
// TODO: test tags .. somehow
/* c8 ignore next 5 */
case 'tag':
case Type.tag.name:
indent.push(1)
break
default:
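
The comment above notes that there should be a method to get a Type from a String. A hypothetical lookup that would serve (typesByName and typeFromName are illustrations, not part of cborg):

import { Type } from './token.js'

// hypothetical helper - not in the codebase
const typesByName = new Map(
  [Type.uint, Type.negint, Type.bytes, Type.string, Type.array,
    Type.map, Type.tag, Type.float, Type.break].map((t) => [t.name, t])
)

/**
 * @param {string} name
 * @returns {Type}
 */
function typeFromName (name) {
  const type = typesByName.get(name)
  if (type === undefined) {
    throw new Error(`unknown type name: ${name}`)
  }
  return type
}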
141 changes: 63 additions & 78 deletions lib/json/encode.js
@@ -5,30 +5,50 @@ import { asU8A, fromString } from '../byte-utils.js'

/**
* @typedef {import('../../interface').EncodeOptions} EncodeOptions
* @typedef {import('../../interface').TokenTypeEncoder} TokenTypeEncoder
* @typedef {import('../token').Token} Token
* @typedef {import('../bl').Bl} Bl
*/

class JSONEncoder extends Array {
constructor () {
super()
/** @type {{type:Type,elements:number}[]} */
this.inRecursive = []
/**
* @param {(buf: Bl, token: Token) => void} action
* @returns {TokenTypeEncoder}
*/
function wrapCompareTokens (action) {
const wrapped = (/** @type {Bl} */ buf, /** @type {Token} */ token) => action(buf, token)
/**
* @param {Token} tok1
* @param {Token} tok2
* @returns {number}
*/
wrapped.compareTokens = function compareTokens (tok1, tok2) {
return tok1.value < tok2.value ? -1 : tok1.value > tok2.value ? 1 : 0
}
return wrapped
}

/**
* @returns {TokenTypeEncoder[]}
*/
function makeJSONEncoders () {
/** @type {TokenTypeEncoder[]} */
const encoders = []
/** @type {{type:Type,elements:number}[]} */
const inRecursive = []

/**
* @param {Bl} buf
*/
prefix (buf) {
const recurs = this.inRecursive[this.inRecursive.length - 1]
function prefix (buf) {
const recurs = inRecursive[inRecursive.length - 1]
if (recurs) {
if (recurs.type === Type.array) {
if (recurs.type.equals(Type.array)) {
recurs.elements++
if (recurs.elements !== 1) { // >first
buf.push([44]) // ','
}
}
if (recurs.type === Type.map) {
if (recurs.type.equals(Type.map)) {
recurs.elements++
if (recurs.elements !== 1) { // >first
if (recurs.elements % 2 === 1) { // key
@@ -41,87 +61,52 @@
}
}

/**
* @param {Bl} buf
* @param {Token} token
*/
[Type.uint.major] (buf, token) {
this.prefix(buf)
encoders[Type.uint.major] = wrapCompareTokens((buf, token) => {
prefix(buf)
const is = String(token.value)
const isa = []
for (let i = 0; i < is.length; i++) {
isa[i] = is.charCodeAt(i)
}
buf.push(isa)
}
})

/**
* @param {Bl} buf
* @param {Token} token
*/
[Type.negint.major] (buf, token) {
// @ts-ignore hack
this[Type.uint.major](buf, token)
}
encoders[Type.negint.major] = wrapCompareTokens((buf, token) => {
encoders[Type.uint.major](buf, token)
})

/**
* @param {Bl} _buf
* @param {Token} _token
*/
[Type.bytes.major] (_buf, _token) {
encoders[Type.bytes.major] = wrapCompareTokens((_buf, _token) => {
throw new Error(`${encodeErrPrefix} unsupported type: Uint8Array`)
}
})

/**
* @param {Bl} buf
* @param {Token} token
*/
[Type.string.major] (buf, token) {
this.prefix(buf)
// buf.push(34) // '"'
// encodeUtf8(token.value, byts)
// buf.push(34) // '"'
encoders[Type.string.major] = wrapCompareTokens((buf, token) => {
prefix(buf)
const byts = fromString(JSON.stringify(token.value))
buf.push(byts.length > 32 ? asU8A(byts) : byts)
}
})

/**
* @param {Bl} buf
* @param {Token} _token
*/
[Type.array.major] (buf, _token) {
this.prefix(buf)
this.inRecursive.push({ type: Type.array, elements: 0 })
encoders[Type.array.major] = wrapCompareTokens((buf, _token) => {
prefix(buf)
inRecursive.push({ type: Type.array, elements: 0 })
buf.push([91]) // '['
}
})

/**
* @param {Bl} buf
* @param {Token} _token
*/
[Type.map.major] (buf, _token) {
this.prefix(buf)
this.inRecursive.push({ type: Type.map, elements: 0 })
encoders[Type.map.major] = wrapCompareTokens((buf, _token) => {
prefix(buf)
inRecursive.push({ type: Type.map, elements: 0 })
buf.push([123]) // '{'
}
})

/**
* @param {Bl} _buf
* @param {Token} _token
*/
[Type.tag.major] (_buf, _token) {}
encoders[Type.tag.major] = wrapCompareTokens((_buf, _token) => {
})

/**
* @param {Bl} buf
* @param {Token} token
*/
[Type.float.major] (buf, token) {
if (token.type.name === 'break') {
const recurs = this.inRecursive.pop()
encoders[Type.float.major] = wrapCompareTokens((buf, token) => {
if (token.type.equals(Type.break)) {
const recurs = inRecursive.pop()
if (recurs) {
if (recurs.type === Type.array) {
if (recurs.type.equals(Type.array)) {
buf.push([93]) // ']'
} else if (recurs.type === Type.map) {
} else if (recurs.type.equals(Type.map)) {
buf.push([125]) // '}'
/* c8 ignore next 3 */
} else {
Expand All @@ -136,14 +121,14 @@ class JSONEncoder extends Array {
throw new Error(`${encodeErrPrefix} unsupported type: undefined`)
}

this.prefix(buf)
if (token.type.name === 'true') {
prefix(buf)
if (token.type.equals(Type.true)) {
buf.push([116, 114, 117, 101]) // 'true'
return
} else if (token.type.name === 'false') {
} else if (token.type.equals(Type.false)) {
buf.push([102, 97, 108, 115, 101]) // 'false'
return
} else if (token.type.name === 'null') {
} else if (token.type.equals(Type.null)) {
buf.push([110, 117, 108, 108]) // 'null'
return
}
@@ -163,7 +148,8 @@
isa.push(48) // '0'
}
buf.push(isa)
}
})
return encoders
}

// The below code is mostly taken and modified from https://github.com/feross/buffer
@@ -283,7 +269,7 @@ function mapSorter (e1, e2) {
}
const keyToken1 = e1[0]
const keyToken2 = e2[0]
if (keyToken1.type !== Type.string || keyToken2.type !== Type.string) {
if (!keyToken1.type.equals(Type.string) || !keyToken2.type.equals(Type.string)) {
throw new Error(`${encodeErrPrefix} non-string map keys are not supported`)
}
if (keyToken1 < keyToken2) {
@@ -305,8 +291,7 @@ const defaultEncodeOptions = { addBreakTokens: true, mapSorter }
*/
function encode (data, options) {
options = Object.assign({}, defaultEncodeOptions, options)
// @ts-ignore TokenTypeEncoder[] requires compareTokens() on each encoder, we don't use them here
return encodeCustom(data, new JSONEncoder(), options)
return encodeCustom(data, makeJSONEncoders(), options)
}

export { encode }
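
The refactor above replaces the class-based JSONEncoder, whose handlers were keyed by computed names like [Type.uint.major] and which needed a @ts-ignore because its methods lacked compareTokens, with a factory returning a genuine TokenTypeEncoder[]. A standalone sketch of the callable-plus-compareTokens shape that wrapCompareTokens produces (variable names are illustrative):

// an encoder is a plain (buf, token) function that also carries compareTokens
function wrapCompareTokens (action) {
  const wrapped = (buf, token) => action(buf, token)
  wrapped.compareTokens = (tok1, tok2) =>
    tok1.value < tok2.value ? -1 : tok1.value > tok2.value ? 1 : 0
  return wrapped
}

const uintEncoder = wrapCompareTokens((buf, token) => {
  // push the ASCII digits of token.value, as the real uint encoder does
  buf.push([...String(token.value)].map((c) => c.charCodeAt(0)))
})

const buf = []
uintEncoder(buf, { value: 42 })                                    // buf is now [[52, 50]]
console.log(uintEncoder.compareTokens({ value: 1 }, { value: 2 })) // -1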
9 changes: 9 additions & 0 deletions lib/json/forward-cborg.js
@@ -0,0 +1,9 @@
import { json } from 'cborg'

const { encode, decode, decodeFirst, Tokenizer } = json
export {
encode,
decode,
decodeFirst,
Tokenizer
}
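
With this forwarding module in place, the cborg/json entry point can resolve back through the main cborg module (the accompanying package.json exports change is not shown in this diff), so a bundle carries a single copy of token.js. A quick usage sketch:

import { json } from 'cborg'

const bytes = json.encode({ a: [1, 2, 3] })   // Uint8Array containing JSON text
console.log(new TextDecoder().decode(bytes))  // {"a":[1,2,3]}
console.log(json.decode(bytes))               // { a: [ 1, 2, 3 ] }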