 'use strict';
 
-const newBSON = require('./register-bson');
+const currentNodeBSON = require('./register-bson');
+const vm = require('vm');
 const fs = require('fs');
 const fetch = require('node-fetch').default;
 const rimraf = require('rimraf');
 const cp = require('child_process');
 
 /*
  * This file tests that previous versions of BSON
- * serialize and deserialize correctly in the most recent version of BSON
+ * serialize and deserialize correctly in the most recent version of BSON,
+ * and that the different distributions (browser, Node.js, etc.) of the
+ * most recent version are mutually compatible as well.
  *
  * This is an unusual situation to run into as users should be using one BSON lib version
  * but it does arise with sub deps etc. and we wish to protect against unexpected behavior
@@ -39,14 +42,14 @@ function downloadZip(version, done) {
   });
 }
 
-describe('Current version', function () {
+describe('Mutual version and distribution compatibility', function () {
   OLD_VERSIONS.forEach(version => {
     before(function (done) {
       this.timeout(30000); // Downloading may take a few seconds.
       if (Number(process.version.split('.')[0].substring(1)) < 8) {
         // WHATWG fetch doesn't download correctly prior to node 8
         // but we should be safe by testing on node 8+
-        this.skip();
+        return done();
       }
       if (fs.existsSync(`bson-${version}.zip`)) {
         fs.unlinkSync(`bson-${version}.zip`);
@@ -73,18 +76,160 @@ describe('Current version', function () {
         done();
       });
     });
+  });
 
-    it(`serializes correctly against ${version} Binary class`, function () {
-      const oldBSON = require(getImportPath(version));
-      const binFromNew = {
-        binary: new newBSON.Binary('aaaa')
-      };
-      const binFromOld = {
-        binary: new oldBSON.Binary('aaaa')
-      };
-      expect(oldBSON.prototype.serialize(binFromNew).toString('hex')).to.equal(
-        newBSON.serialize(binFromOld).toString('hex')
+  // Node.js requires an .mjs filename extension for loading ES modules.
+  before(() => {
+    try {
+      fs.writeFileSync(
+        './bson.browser.esm.mjs',
+        fs.readFileSync(__dirname + '/../dist/bson.browser.esm.js')
       );
-    });
+      fs.writeFileSync('./bson.esm.mjs', fs.readFileSync(__dirname + '/../dist/bson.esm.js'));
+    } catch (e) {
+      // bundling fails in CI on Windows, no idea why, hence also the
+      // process.platform !== 'win32' check below
+    }
+  });
+
+  after(() => {
+    try {
+      fs.unlinkSync('./bson.browser.esm.mjs');
+      fs.unlinkSync('./bson.esm.mjs');
+    } catch (e) {
+      // ignore
+    }
   });
+
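+  // Every way of loading a BSON implementation that should be mutually compatible:
+  // the published legacy releases plus each distribution of the current version.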
+  const variants = OLD_VERSIONS.map(version => ({
+    name: `legacy ${version}`,
+    load: () => {
+      const bson = require(getImportPath(version));
+      bson.serialize = bson.prototype.serialize;
+      bson.deserialize = bson.prototype.deserialize;
+      return Promise.resolve(bson);
+    },
+    legacy: true
+  })).concat([
+    {
+      name: 'Node.js lib/bson',
+      load: () => Promise.resolve(currentNodeBSON)
+    },
+    {
+      name: 'Browser ESM',
+      // eval because import is a syntax error in earlier Node.js versions
+      // that are still supported in CI
+      load: () => eval(`import("${__dirname}/../bson.browser.esm.mjs")`),
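+      // The browser builds ship a Buffer polyfill rather than using Node.js
+      // Buffers; the serialization test below special-cases these variants.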
+      usesBufferPolyfill: true
+    },
+    {
+      name: 'Browser UMD',
+      load: () => Promise.resolve(require('../dist/bson.browser.umd.js')),
+      usesBufferPolyfill: true
+    },
+    {
+      name: 'Generic bundle',
+      load: () => {
+        const source = fs.readFileSync(__dirname + '/../dist/bson.bundle.js', 'utf8');
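+        // Evaluate the bundle in a fresh context and return the BSON global it defines.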
+        return Promise.resolve(vm.runInNewContext(`${source}; BSON`, { global, process }));
+      },
+      usesBufferPolyfill: true
+    },
+    {
+      name: 'Node.js ESM',
+      load: () => eval(`import("${__dirname}/../bson.esm.mjs")`)
+    }
+  ]);
+
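+  // Create one instance of each BSON type using the given BSON implementation.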
+  const makeObjects = bson => [
+    new bson.ObjectId('5f16b8bebe434dc98cdfc9ca'),
+    new bson.DBRef('a', new bson.ObjectId('5f16b8bebe434dc98cdfc9cb'), 'db'),
+    new bson.MinKey(),
+    new bson.MaxKey(),
+    new bson.Timestamp(1, 100),
+    new bson.Code('abc'),
+    bson.Decimal128.fromString('1'),
+    bson.Long.fromString('1'),
+    new bson.Binary(Buffer.from('abcäbc🎉'), 128),
+    new Date('2021-05-04T15:49:33.000Z'),
+    /match/
+  ];
+
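+  // Check every (from, to) pairing of the variants defined above.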
+  for (const from of variants) {
+    for (const to of variants) {
+      describe(`serializing objects from ${from.name} using ${to.name}`, () => {
+        let fromObjects;
+        let fromBSON;
+        let toBSON;
+
+        before(function () {
+          // Load the from/to BSON versions asynchronously because e.g. ESM
+          // requires asynchronous loading.
+          return Promise.resolve()
+            .then(() => {
+              return from.load();
+            })
+            .then(loaded => {
+              fromBSON = loaded;
+              return to.load();
+            })
+            .then(loaded => {
+              toBSON = loaded;
+            })
+            .then(
+              () => {
+                fromObjects = makeObjects(fromBSON);
+              },
+              err => {
+                if (+process.version.slice(1).split('.')[0] >= 12 && process.platform !== 'win32') {
+                  throw err; // On Node.js 12+, all loading is expected to work.
+                } else {
+                  this.skip(); // Otherwise, e.g. ESM can't be loaded, so just skip.
+                }
+              }
+            );
+        });
+
+        it('serializes in a compatible way', function () {
+          for (const object of fromObjects) {
+            // If the object in question uses Buffers in its serialization, and
+            // its Buffer was created using the polyfill, and we're serializing
+            // using a legacy version that uses buf.copy(), then that fails
+            // because the Buffer polyfill's typechecking is buggy, so we
+            // skip these cases.
+            // This is tracked as https://jira.mongodb.org/browse/NODE-2848
+            // and would be addressed by https://github.com/feross/buffer/pull/285
+            // if that is merged at some point.
+            if (
+              from.usesBufferPolyfill &&
+              to.legacy &&
+              ['ObjectId', 'Decimal128', 'DBRef', 'Binary'].includes(object.constructor.name)
+            ) {
+              continue;
+            }
+
+            try {
+              // Check that both BSON versions serialize to equal Buffers
+              expect(toBSON.serialize({ object }).toString('hex')).to.equal(
+                fromBSON.serialize({ object }).toString('hex')
+              );
+              if (!from.legacy) {
+                // Check that serializing using one version and deserializing using
+                // the other gives back the original object.
+                const cloned = fromBSON.deserialize(toBSON.serialize({ object })).object;
+                expect(fromBSON.EJSON.serialize(cloned)).to.deep.equal(
+                  fromBSON.EJSON.serialize(object)
+                );
+              }
+            } catch (err) {
+              // If something fails, note the object type in the error message
+              // for easier debugging.
+              err.message += ` (${object.constructor.name})`;
+              throw err;
+            }
+          }
+        });
+      });
+    }
+  }
 });