Commit 72871fc

Authored by asthamohta, with gcf-owl-bot[bot] and release-please[bot]
fix: for merging when array/struct chunks contain null (#1541)
* fix: for merging when array/struct chunks contain null
* fix: fix for new @types/node version (#1542)
* fix: fix for new @types/node version
* 🦉 Updates from OwlBot
  See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
  Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
* chore(main): release 5.16.2 (#1543)
  Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com>
* fix: adding test case for chunked struct with a null array

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com>
1 parent 34f69e7 commit 72871fc

3 files changed: 143 additions & 0 deletions


src/partial-result-stream.ts

Lines changed: 3 additions & 0 deletions
@@ -368,6 +368,9 @@ export class PartialResultStream extends Transform implements ResultEvents {
       type.code === google.spanner.v1.TypeCode.STRUCT ||
       type.code === 'STRUCT'
     ) {
+      if (head === null || tail === null) {
+        return [head, tail];
+      }
       return [PartialResultStream.mergeLists(type, head, tail)];
     }
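The guard added above short-circuits the chunk merge for ARRAY and STRUCT columns: when either half of a chunked value (head, the buffered portion, or tail, its continuation from the next PartialResultSet) is a SQL NULL, both values are returned as-is instead of being spliced together. Below is a minimal standalone sketch of that behavior; the mergeChunkedLists helper and its simplified value type are illustrative only, not the library's actual PartialResultStream.mergeLists API, and the sketch deliberately skips the recursive merging of nested lists that the real implementation performs.

type Value = string | Value[] | null;

// Illustrative stand-in for the guarded merge above: splice two chunk halves
// together only when both are non-null lists; a NULL on either side is already
// a complete value and is passed through untouched.
function mergeChunkedLists(head: Value, tail: Value): Value[] {
  if (head === null || tail === null) {
    // Mirrors the new guard: return both values unmerged.
    return [head, tail];
  }
  const headList = head as Value[];
  const tailList = tail as Value[];
  const last = headList[headList.length - 1];
  const first = tailList[0];
  // Chunked string elements at the boundary are concatenated,
  // e.g. ['a', 'b'] + ['c', 'd'] => [['a', 'bc', 'd']].
  if (typeof last === 'string' && typeof first === 'string') {
    return [[...headList.slice(0, -1), last + first, ...tailList.slice(1)]];
  }
  // Any other boundary (including a null element) keeps both boundary
  // elements distinct while concatenating the surrounding lists.
  return [[...headList, ...tailList]];
}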

test/data/streaming-read-acceptance-test.json

Lines changed: 26 additions & 0 deletions
@@ -332,6 +332,32 @@
         "{\n \"values\": [\"f\"]\n}"
       ],
       "name": "Multiple Row Chunks/Non Chunks Interleaved"
+    },
+    {
+      "result": {
+        "value": [
+          [
+            [
+              [
+                [
+                  [
+                    "abc"
+                  ],
+                  null,
+                  [
+                    "def"
+                  ]
+                ]
+              ]
+            ]
+          ]
+        ]
+      },
+      "chunks": [
+        "{\n \"metadata\": {\n \"rowType\": {\n \"fields\": [{\n \"name\": \"f1\",\n \"type\": {\n \"code\": \"ARRAY\",\n \"arrayElementType\": {\n \"code\": \"STRUCT\",\n \"structType\": {\n \"fields\": [{\n \"name\": \"f11\",\n \"type\": {\n \"code\": \"ARRAY\",\n \"arrayElementType\": {\n \"code\": \"STRUCT\",\n \"structType\": {\n \"fields\": [{\n \"name\": \"f12\",\n \"type\": {\n \"code\": \"STRING\"\n }\n }]\n }\n }\n }\n }]\n }\n }\n }\n }]\n }\n },\n \"values\": [[[[[\"abc\"], null]]]],\n \"chunkedValue\": true\n}",
+        "{\n \"values\": [[[[[\"def\"]]]]]\n}"
+      ],
+      "name": "Nested Struct Array Chunking Test With null"
     }
   ]
 }

test/spanner.ts

Lines changed: 114 additions & 0 deletions
@@ -54,6 +54,8 @@ import RequestOptions = google.spanner.v1.RequestOptions;
 import PartialResultSet = google.spanner.v1.PartialResultSet;
 import protobuf = google.spanner.v1;
 import Priority = google.spanner.v1.RequestOptions.Priority;
+import TypeCode = google.spanner.v1.TypeCode;
+import NullValue = google.protobuf.NullValue;
 
 function numberToEnglishWord(num: number): string {
   switch (num) {
@@ -3389,6 +3391,118 @@ describe('Spanner with mock server', () => {
     }
   });
 
+  it('should return all values from PartialResultSet with chunked struct with a null array field', async () => {
+    const sql = 'SELECT * FROM TestTable';
+    const prs1 = PartialResultSet.create({
+      metadata: createArrayOfStructMetadata(),
+      values: [
+        {
+          listValue: {
+            values: [
+              {
+                listValue: {
+                  values: [
+                    // The array field is NULL.
+                    {nullValue: NullValue.NULL_VALUE},
+                  ],
+                },
+              },
+            ],
+          },
+        },
+      ],
+      // This PartialResultSet is chunked, and the last value was the NULL value for the ARRAY field.
+      // This means that the next value will be the STRING field.
+      chunkedValue: true,
+    });
+    const prs2 = PartialResultSet.create({
+      values: [
+        {
+          listValue: {
+            values: [
+              {
+                listValue: {
+                  values: [{stringValue: 'First row'}],
+                },
+              },
+            ],
+          },
+        },
+        {
+          listValue: {
+            values: [
+              {
+                listValue: {
+                  values: [
+                    {listValue: {values: [{stringValue: '1'}]}},
+                    {stringValue: 'Second row'},
+                  ],
+                },
+              },
+            ],
+          },
+        },
+      ],
+    });
+    setupResultsAndErrors(sql, [prs1, prs2], []);
+    const database = newTestDatabase();
+    try {
+      const [rows] = (await database.run({
+        sql,
+        json: true,
+      })) as Json[];
+      assert.strictEqual(rows.length, 2);
+      assert.strictEqual(rows[0].outerArray.length, 1);
+      assert.strictEqual(rows[0].outerArray[0].innerField, 'First row');
+      assert.ok(
+        rows[0].outerArray[0].innerArray === null,
+        'Inner array should be null'
+      );
+      assert.strictEqual(rows[1].outerArray.length, 1);
+      assert.strictEqual(rows[1].outerArray[0].innerField, 'Second row');
+      assert.strictEqual(rows[1].outerArray[0].innerArray.length, 1);
+      assert.strictEqual(rows[1].outerArray[0].innerArray[0], '1');
+    } finally {
+      await database.close();
+    }
+  });
+
+  function createArrayOfStructMetadata() {
+    const fields = [
+      protobuf.StructType.Field.create({
+        name: 'outerArray',
+        type: protobuf.Type.create({
+          code: protobuf.TypeCode.ARRAY,
+          arrayElementType: protobuf.Type.create({
+            code: protobuf.TypeCode.STRUCT,
+            structType: protobuf.StructType.create({
+              fields: [
+                {
+                  name: 'innerArray',
+                  type: protobuf.Type.create({
+                    code: TypeCode.ARRAY,
+                    arrayElementType: protobuf.Type.create({
+                      code: TypeCode.STRING,
+                    }),
+                  }),
+                },
+                {
+                  name: 'innerField',
+                  type: protobuf.Type.create({code: TypeCode.STRING}),
+                },
+              ],
+            }),
+          }),
+        }),
+      }),
+    ];
+    return new protobuf.ResultSetMetadata({
+      rowType: new protobuf.StructType({
+        fields,
+      }),
+    });
+  }
+
   it('should reset to the chunked value of the last PartialResultSet with a resume token on retry', async () => {
     // This tests the following scenario:
     // 1. PartialResultSet without resume token, no chunked value.
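Taken together, the assertions imply that with json: true the two rows deserialize into roughly the following shapes; the object literals below are inferred from the assertions above, not captured library output:

// rows[0]: the NULL array that ended the chunked PartialResultSet survives
// as an explicit null field.
{outerArray: [{innerArray: null, innerField: 'First row'}]}

// rows[1]: an ordinary, unchunked struct with a one-element string array.
{outerArray: [{innerArray: ['1'], innerField: 'Second row'}]}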
