
Commit 1064783

test(NODE-6534): add spec test runner for Binary vector

1 parent a5ed30d

1 file changed: test/node/bson_binary_vector.spec.test.ts (6 additions, 68 deletions)
@@ -5,14 +5,6 @@ import { expect } from 'chai';
 
 const { toHex, fromHex } = BSON.onDemand.ByteUtils;
 
-const FLOAT = new Float64Array(1);
-const FLOAT_BYTES = new Uint8Array(FLOAT.buffer, 0, 8);
-
-FLOAT[0] = -1;
-// Little endian [0, 0, 0, 0, 0, 0, 240, 191]
-// Big endian [191, 240, 0, 0, 0, 0, 0, 0]
-const isBigEndian = FLOAT_BYTES[7] === 0;
-
 type DTypeAlias = 'INT8' | 'FLOAT32' | 'PACKED_BIT';
 type VectorTest = {
   description: string;
@@ -25,36 +17,6 @@ type VectorTest = {
 };
 type VectorSuite = { description: string; test_key: string; tests: VectorTest[] };
 
-function validateVector(vector: Binary): void {
-  const VECTOR_TYPE = Object.freeze({
-    Int8: 0x03,
-    Float32: 0x27,
-    PackedBit: 0x10
-  } as const);
-
-  if (vector.sub_type !== 9) return;
-
-  const size = vector.position;
-  const d_type = vector.buffer[0] ?? 0;
-  const padding = vector.buffer[1] ?? 0;
-
-  if ((d_type === VECTOR_TYPE.Float32 || d_type === VECTOR_TYPE.Int8) && padding !== 0) {
-    throw new BSONError('Invalid Vector: padding must be zero for int8 and float32 vectors');
-  }
-
-  if (d_type === VECTOR_TYPE.PackedBit && padding !== 0 && size === 2) {
-    throw new BSONError(
-      'Invalid Vector: padding must be zero for packed bit vectors that are empty'
-    );
-  }
-
-  if (d_type === VECTOR_TYPE.PackedBit && padding > 7) {
-    throw new BSONError(
-      `Invalid Vector: padding must be a value between 0 and 7. found: ${padding}`
-    );
-  }
-}
-
 function fixFloats(f: string | number): number {
   if (typeof f === 'number') {
     return f;
@@ -97,34 +59,14 @@ function make(
   let binary: Binary;
   switch (dtype_alias) {
     case 'PACKED_BIT':
-    case 'INT8': {
-      const array = new Int8Array(vector.map(dtype_alias === 'INT8' ? fixInt8s : fixBits));
-      const buffer = new Uint8Array(array.byteLength + 2);
-      buffer[0] = +dtype_hex;
-      buffer[1] = padding;
-      buffer.set(new Uint8Array(array.buffer), 2);
-      binary = new Binary(buffer, 9);
+      binary = Binary.fromPackedBits(new Uint8Array(vector.map(fixBits)), padding);
       break;
-    }
-
-    case 'FLOAT32': {
-      const array = new Float32Array(vector.map(fixFloats));
-      const buffer = new Uint8Array(array.byteLength + 2);
-      buffer[0] = +dtype_hex;
-      buffer[1] = padding;
-      if (isBigEndian) {
-        for (let i = 0; i < array.length; i++) {
-          const bytes = new Uint8Array(array.buffer, i * 4, 4);
-          bytes.reverse();
-          buffer.set(bytes, i * 4 + 2);
-        }
-      } else {
-        buffer.set(new Uint8Array(array.buffer), 2);
-      }
-      binary = new Binary(buffer, 9);
+    case 'INT8':
+      binary = Binary.fromInt8Array(new Int8Array(vector.map(fixInt8s)));
+      break;
+    case 'FLOAT32':
+      binary = Binary.fromFloat32Array(new Float32Array(vector.map(fixFloats)));
       break;
-    }
-
     default:
       throw new Error(`Unknown dtype_alias: ${dtype_alias}`);
   }
@@ -188,8 +130,6 @@ describe('BSON Binary Vector spec tests', () => {
         try {
          const bin = make(test.vector, test.dtype_hex, test.dtype_alias, test.padding);
          BSON.serialize({ bin });
-          // TODO(NODE-6537): The following validation MUST be a part of serialize
-          validateVector(bin);
        } catch (error) {
          thrownError = error;
        }
@@ -205,8 +145,6 @@ describe('BSON Binary Vector spec tests', () => {
        try {
          const bin = make(test.vector, test.dtype_hex, test.dtype_alias, test.padding);
          BSON.EJSON.stringify({ bin });
-          // TODO(NODE-6537): The following validation MUST be a part of stringify
-          validateVector(bin);
        } catch (error) {
          thrownError = error;
        }
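
For reference, the factory helpers adopted above replace the test's hand-rolled construction of subtype-9 (vector) Binary values. Below is a minimal sketch of how they are used, assuming the Binary.fromInt8Array, Binary.fromFloat32Array, and Binary.fromPackedBits helpers exported by the bson package (the same calls the updated test relies on); the sample values are illustrative only.

import { BSON, Binary } from 'bson';

// Each helper produces a subtype-9 (vector) Binary whose first byte is the
// dtype and whose second byte is the padding, matching the layout the removed
// manual code wrote into buffer[0] and buffer[1].
const int8 = Binary.fromInt8Array(new Int8Array([127, 7, 0, -128]));
const float32 = Binary.fromFloat32Array(new Float32Array([127.0, 7.0]));
// Packed-bit vectors also take a padding count (0-7) for unused trailing bits.
const bits = Binary.fromPackedBits(new Uint8Array([0b11100000]), 5);

// The vectors round-trip through ordinary serialization, which is what the
// spec runner exercises via BSON.serialize / BSON.EJSON.stringify.
const doc = BSON.deserialize(BSON.serialize({ int8, float32, bits }));
console.log(doc.int8.sub_type); // 9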
