/**
 * Copyright 2019, OpenCensus Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * This module contains the functions for serializing and deserializing
 * TagMap (TagContext) with the binary format. It allows tags to propagate
 * across requests.
 *
 * <p>OpenCensus tag context encoding:
 *
 * <ul>
 *   <li>Tags are encoded in a single byte sequence. The version 0 format is:
 *   <li>{@code <version_id><encoded_tags>}
 *   <li>{@code <version_id> -> a single byte, value 0}
 *   <li>{@code <encoded_tags> -> (<tag_field_id><tag_encoding>)*}
 *       <ul>
 *       <li>{@code <tag_field_id>} -> a single byte, value 0
 *       <li>{@code <tag_encoding>}:
 *           <ul>
 *           <li>{@code <tag_key_len><tag_key><tag_val_len><tag_val>}
 *               <ul>
 *               <li>{@code <tag_key_len>} -> varint encoded integer
 *               <li>{@code <tag_key>} -> tag_key_len bytes comprising tag name
 *               <li>{@code <tag_val_len>} -> varint encoded integer
 *               <li>{@code <tag_val>} -> tag_val_len bytes comprising tag value
 *               </ul>
 *           </li>
 *       </ul>
 *   </li>
 *   </ul>
 * </ul>
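 *
 * <p>For example (a worked encoding of the format above): a context holding
 * the single tag {@code "k1" -> "v1"} serializes to the eight bytes
 * {@code 0x00 0x00 0x02 0x6b 0x31 0x02 0x76 0x31}, i.e.
 * {@code <version_id=0> <tag_field_id=0> <key_len=2> "k1" <val_len=2> "v1"},
 * where {@code 0x6b 0x31} and {@code 0x76 0x31} are the ASCII bytes for
 * "k1" and "v1".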
 */

import {TagMap} from '../tag-map';
import {TagKey, TagValue} from '../types';
import {DecodeVarint, EncodeVarint} from './variant-encoding';

// This size limit only applies to the bytes representing tag keys and values.
export const TAG_MAP_SERIALIZED_SIZE_LIMIT = 8192;

const ENCODING = 'utf8';
const VERSION_ID = 0;
const TAG_FIELD_ID = 0;
const VERSION_ID_INDEX = 0;

/**
 * Serializes a given TagMap to the on-the-wire format.
 * @param tagMap The TagMap to serialize.
 */
export function serializeBinary(tagMap: TagMap): Buffer {
  const byteArray: number[] = [];
  byteArray.push(VERSION_ID);
  let totalChars = 0;
  const tags = tagMap.tags;
  tags.forEach((tagValue: TagValue, tagKey: TagKey) => {
    totalChars += tagKey.name.length;
    totalChars += tagValue.value.length;
    encodeTag(tagKey, tagValue, byteArray);
  });

  if (totalChars > TAG_MAP_SERIALIZED_SIZE_LIMIT) {
    throw new Error(`Size of TagMap exceeds the maximum serialized size ${
        TAG_MAP_SERIALIZED_SIZE_LIMIT}`);
  }
  return Buffer.from(byteArray);
}
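
// A minimal usage sketch (illustrative only, not part of this module's
// exports); it populates a TagMap via set(), the same method parseTags()
// uses below:
//
//   const tagMap = new TagMap();
//   tagMap.set({name: 'method'}, {value: 'GET'});
//   const bytes = serializeBinary(tagMap);
//   // bytes -> <Buffer 00 00 06 6d 65 74 68 6f 64 03 47 45 54>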

/**
 * Deserializes input to a TagMap based on the binary format standard.
 * @param buffer The encoded buffer to deserialize.
 */
export function deserializeBinary(buffer: Buffer): TagMap {
  if (buffer.length === 0) {
    throw new Error('Input buffer can not be empty.');
  }
  const versionId = buffer.readInt8(VERSION_ID_INDEX);
  if (versionId > VERSION_ID) {
    throw new Error(`Wrong Version ID: ${
        versionId}. Currently supports version up to: ${VERSION_ID}`);
  }
  return parseTags(buffer);
}
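
// Continuing the illustrative sketch above: on the receiving side of a
// request, deserializing the bytes produced by serializeBinary() restores
// an equivalent TagMap.
//
//   const restored = deserializeBinary(bytes);
//   // restored now holds the tag {name: 'method'} -> {value: 'GET'}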

// Encodes one tag as <tag_field_id><tag_key_len><tag_key><tag_val_len><tag_val>.
function encodeTag(tagKey: TagKey, tagValue: TagValue, byteArray: number[]) {
  byteArray.push(TAG_FIELD_ID);
  encodeString(tagKey.name, byteArray);
  encodeString(tagValue.value, byteArray);
}

// Encodes a string as <varint length><bytes>. Note: this writes one byte per
// character (charCodeAt), so only single-byte (ASCII) characters round-trip
// exactly through the utf8 decoding in decodeString().
function encodeString(input: string, byteArray: number[]) {
  byteArray.push(...EncodeVarint(input.length));
  byteArray.push(...input.split('').map(unicode));
  return byteArray;
}

function parseTags(buffer: Buffer): TagMap {
  const tags = new TagMap();
  const limit = buffer.length;
  let totalChars = 0;
  // Index 0 holds the version id, so tag parsing starts at index 1.
  let currentIndex = 1;

  while (currentIndex < limit) {
    const fieldId = buffer.readInt8(currentIndex);
    if (fieldId > TAG_FIELD_ID) {
      // Stop parsing at the first unknown field ID, since there is no way to
      // know its length.
      break;
    }
    // Skip the tag field id byte.
    currentIndex += 1;
    // Each encoded string occupies one length byte (a single-byte varint)
    // plus the string bytes, so the index advances by (1 + string length)
    // per key and per value.
    const key = decodeString(buffer, currentIndex);
    currentIndex += key.length;
    totalChars += key.length;

    currentIndex += 1;
    const val = decodeString(buffer, currentIndex);
    currentIndex += val.length;
    totalChars += val.length;

    currentIndex += 1;
    if (totalChars > TAG_MAP_SERIALIZED_SIZE_LIMIT) {
      throw new Error(`Size of TagMap exceeds the maximum serialized size ${
          TAG_MAP_SERIALIZED_SIZE_LIMIT}`);
    } else {
      tags.set({name: key}, {value: val});
    }
  }
  return tags;
}

// Reads a length-prefixed string at `offset`, assuming the varint length
// occupies a single byte.
function decodeString(buffer: Buffer, offset: number): string {
  const length = DecodeVarint(buffer, offset);
  return buffer.toString(ENCODING, offset + 1, offset + 1 + length);
}

// Returns the character code of a single-character string.
function unicode(x: string) {
  return x.charCodeAt(0);
}