@@ -54,6 +54,18 @@ var streamRouter = require('../common/stream-router.js');
*/
var util = require('../common/util.js');

+/**
+ * The file formats accepted by BigQuery.
+ *
+ * @type {object}
+ * @private
+ */
+var FORMATS = {
+  avro: 'AVRO',
+  csv: 'CSV',
+  json: 'NEWLINE_DELIMITED_JSON'
+};
+
/*! Developer Documentation
*
* @param {module:bigquery/dataset} dataset - Dataset instance.
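
This hunk consolidates the per-method format maps that were previously duplicated inside `createWriteStream`, `export`, and `import` into a single module-level `FORMATS` constant, with Avro alongside CSV and JSON. A minimal sketch of the lookup behavior (the `parquet` probe is hypothetical, just to show the miss case):

```js
// Keys are the user-facing names; values are the enum strings the
// BigQuery API expects.
var FORMATS = {
  avro: 'AVRO',
  csv: 'CSV',
  json: 'NEWLINE_DELIMITED_JSON'
};

console.log(FORMATS['AVRO'.toLowerCase()]); // 'AVRO'
console.log(FORMATS['json']);               // 'NEWLINE_DELIMITED_JSON'
console.log(FORMATS['parquet']);            // undefined: unrecognized formats fall through
```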
@@ -293,7 +305,7 @@ Table.mergeSchemaWithRows_ = function(schema, rows) {
* table.copy(yourTable, metadata, function(err, job, apiResponse) {});
*/
Table.prototype.copy = function(destination, metadata, callback) {
-  var that = this;
+  var self = this;

  if (!(destination instanceof Table)) {
    throw new Error('Destination must be a Table object.');
@@ -331,7 +343,7 @@ Table.prototype.copy = function(destination, metadata, callback) {
      return;
    }

-    var job = that.bigQuery.job(resp.jobReference.jobId);
+    var job = self.bigQuery.job(resp.jobReference.jobId);
    job.metadata = resp;

    callback(null, job, resp);
@@ -361,8 +373,8 @@ Table.prototype.createReadStream = function() {
};

/**
- * Load data into your table from a readable stream of JSON or CSV-formatted
- * data.
+ * Load data into your table from a readable stream of JSON, CSV, or
+ * AVRO data.
*
* @resource [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
*
@@ -404,21 +416,17 @@ Table.prototype.createReadStream = function() {
* .on('complete', function(job) {});
*/
Table.prototype.createWriteStream = function(metadata) {
-  var that = this;
+  var self = this;

  metadata = metadata || {};

-  var fileTypeMap = {
-    csv: 'CSV',
-    json: 'NEWLINE_DELIMITED_JSON'
-  };
-  var fileTypes = Object.keys(fileTypeMap).map(function(key) {
-    return fileTypeMap[key];
+  var fileTypes = Object.keys(FORMATS).map(function(key) {
+    return FORMATS[key];
  });

  if (is.string(metadata)) {
    metadata = {
-      sourceFormat: fileTypeMap[metadata.toLowerCase()]
+      sourceFormat: FORMATS[metadata.toLowerCase()]
    };
  }
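
With the shared constant in place, the string shorthand to `createWriteStream` now resolves Avro as well as CSV and JSON. A hedged usage sketch, assuming `table` is a module:bigquery/table instance and the file path is hypothetical:

```js
var fs = require('fs');

fs.createReadStream('data.json')
  // The string is lowercased and looked up in FORMATS, yielding
  // sourceFormat: 'NEWLINE_DELIMITED_JSON'.
  .pipe(table.createWriteStream('json'))
  .on('complete', function(job) {
    // `job` is a module:bigquery/job instance that can be polled
    // for the load job's status.
  });
```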
@@ -428,9 +436,9 @@ Table.prototype.createWriteStream = function(metadata) {
  extend(true, metadata, {
    destinationTable: {
-      projectId: that.bigQuery.projectId,
-      datasetId: that.dataset.id,
-      tableId: that.id
+      projectId: self.bigQuery.projectId,
+      datasetId: self.dataset.id,
+      tableId: self.id
    }
  });
@@ -443,7 +451,7 @@ Table.prototype.createWriteStream = function(metadata) {
  dup.once('writing', function() {
    util.makeWritableStream(dup, {
-      makeAuthenticatedRequest: that.bigQuery.makeAuthenticatedRequest,
+      makeAuthenticatedRequest: self.bigQuery.makeAuthenticatedRequest,
      metadata: {
        configuration: {
          load: metadata
@@ -452,11 +460,11 @@ Table.prototype.createWriteStream = function(metadata) {
      request: {
        uri: format('{base}/{projectId}/jobs', {
          base: 'https://www.googleapis.com/upload/bigquery/v2/projects',
-          projectId: that.bigQuery.projectId
+          projectId: self.bigQuery.projectId
        })
      }
    }, function(data) {
-      var job = that.bigQuery.job(data.jobReference.jobId);
+      var job = self.bigQuery.job(data.jobReference.jobId);
      job.metadata = data;

      dup.emit('complete', job);
@@ -523,19 +531,13 @@ Table.prototype.createWriteStream = function(metadata) {
* ], options, function(err, job, apiResponse) {});
*/
Table.prototype.export = function(destination, options, callback) {
-  var that = this;
+  var self = this;

  if (is.fn(options)) {
    callback = options;
    options = {};
  }

-  var formats = {
-    avro: 'AVRO',
-    csv: 'CSV',
-    json: 'NEWLINE_DELIMITED_JSON'
-  };
-
  extend(true, options, {
    destinationUris: arrify(destination).map(function(dest) {
      if (!(dest instanceof File)) {
@@ -545,8 +547,8 @@ Table.prototype.export = function(destination, options, callback) {
      // If no explicit format was provided, attempt to find a match from the
      // file's extension. If no match, don't set, and default upstream to CSV.
      var format = path.extname(dest.name).substr(1).toLowerCase();
-      if (!options.destinationFormat && !options.format && formats[format]) {
-        options.destinationFormat = formats[format];
+      if (!options.destinationFormat && !options.format && FORMATS[format]) {
+        options.destinationFormat = FORMATS[format];
      }

      return 'gs://' + dest.bucket.name + '/' + dest.name;
@@ -556,8 +558,8 @@ Table.prototype.export = function(destination, options, callback) {
  if (options.format) {
    options.format = options.format.toLowerCase();

-    if (formats[options.format]) {
-      options.destinationFormat = formats[options.format];
+    if (FORMATS[options.format]) {
+      options.destinationFormat = FORMATS[options.format];
      delete options.format;
    } else {
      throw new Error('Destination format not recognized: ' + options.format);
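
`export` now leans on the same constant: an explicit `options.format` is lowercased and normalized through `FORMATS`, extension detection handles destinations named like `*.avro`, and anything outside the map still throws before a request is made. A sketch of the two explicit-format paths, assuming `file` is a module:storage/file object:

```js
// 'avro' matches FORMATS and is moved to options.destinationFormat.
table.export(file, { format: 'avro' }, function(err, job, apiResponse) {});

// 'parquet' has no FORMATS entry, so this throws synchronously:
// Error: Destination format not recognized: parquet
table.export(file, { format: 'parquet' }, function(err, job, apiResponse) {});
```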
@@ -591,7 +593,7 @@ Table.prototype.export = function(destination, options, callback) {
      return;
    }

-    var job = that.bigQuery.job(resp.jobReference.jobId);
+    var job = self.bigQuery.job(resp.jobReference.jobId);
    job.metadata = resp;

    callback(null, job, resp);
@@ -658,7 +660,7 @@ Table.prototype.export = function(destination, options, callback) {
* });
*/
Table.prototype.getRows = function(options, callback) {
-  var that = this;
+  var self = this;

  if (is.fn(options)) {
    callback = options;
@@ -684,9 +686,9 @@ Table.prototype.getRows = function(options, callback) {
      });
    }

-    if (resp.rows && resp.rows.length > 0 && !that.metadata.schema) {
+    if (resp.rows && resp.rows.length > 0 && !self.metadata.schema) {
      // We don't know the schema for this table yet. Do a quick stat.
-      that.getMetadata(function(err, metadata, apiResponse) {
+      self.getMetadata(function(err, metadata, apiResponse) {
        if (err) {
          onComplete(err, null, null, apiResponse);
          return;
@@ -707,7 +709,7 @@ Table.prototype.getRows = function(options, callback) {
      return;
    }

-    rows = Table.mergeSchemaWithRows_(that.metadata.schema, rows || []);
+    rows = Table.mergeSchemaWithRows_(self.metadata.schema, rows || []);

    callback(null, rows, nextQuery, resp);
  }
@@ -720,16 +722,17 @@ Table.prototype.getRows = function(options, callback) {
* asynchronously. If you would like instantaneous access to your data, insert
* it using {module:bigquery/table#insert}.
*
- * Note: Only JSON and CSV source files are supported. The file type will be
- * inferred by the given file's extension. If you wish to override this, you
- * must provide a `metadata` object.
+ * Note: The file type will be inferred from the given file's extension. If
+ * you wish to override this, you must provide `metadata.format`.
*
* @resource [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert}
*
* @param {string|module:storage/file} source - The source file to import.
* @param {object=} metadata - Metadata to set with the load operation. The
*     metadata object should be in the format of the
*     [`configuration.load`](http://goo.gl/BVcXk4) property of a Jobs resource.
+ * @param {string} metadata.format - The format of the data being imported.
+ *     Allowed options are "CSV", "JSON", or "AVRO".
* @param {function} callback - The callback function.
* @param {?error} callback.err - An error returned while making this request
* @param {module:bigquery/job} callback.job - The job used to import your data.
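
The new `metadata.format` option documented above is the user-facing entry point for Avro imports. A hedged usage sketch; `table` and the file path are hypothetical stand-ins:

```js
// The explicit option takes precedence over extension detection, so
// this would load as Avro even if the file had no extension at all.
table.import('./records.avro', { format: 'avro' }, function(err, job, apiResponse) {
  // `job` tracks the asynchronous load; `apiResponse` is the raw
  // Jobs.insert response.
});
```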
@@ -772,7 +775,7 @@ Table.prototype.getRows = function(options, callback) {
* ], function(err, job, apiResponse) {});
*/
Table.prototype.import = function(source, metadata, callback) {
-  var that = this;
+  var self = this;

  if (is.fn(metadata)) {
    callback = metadata;
@@ -782,17 +785,18 @@ Table.prototype.import = function(source, metadata, callback) {
  callback = callback || util.noop;
  metadata = metadata || {};

-  var formats = {
-    csv: 'CSV',
-    json: 'NEWLINE_DELIMITED_JSON'
-  };
+  var format = metadata.sourceFormat || metadata.format;
+  if (format) {
+    metadata.sourceFormat = FORMATS[format.toLowerCase()];
+    delete metadata.format;
+  }

  if (is.string(source)) {
    // A path to a file was given. If a sourceFormat wasn't specified, try to
    // find a match from the file's extension.
-    var format = formats[path.extname(source).substr(1).toLowerCase()];
-    if (!metadata.sourceFormat && format) {
-      metadata.sourceFormat = format;
+    var detectedFormat = FORMATS[path.extname(source).substr(1).toLowerCase()];
+    if (!metadata.sourceFormat && detectedFormat) {
+      metadata.sourceFormat = detectedFormat;
    }

    // Read the file into a new write stream.
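
The normalization block accepts either `metadata.sourceFormat` or the friendlier `metadata.format`, lowercases whichever is present, and maps it through `FORMATS`; extension detection then only fills `sourceFormat` when nothing was set. A sketch of the behavior, with `normalize` as a hypothetical helper mirroring the hunk above:

```js
function normalize(metadata) {
  // Same logic as the added lines, using the module-level FORMATS map.
  var format = metadata.sourceFormat || metadata.format;
  if (format) {
    metadata.sourceFormat = FORMATS[format.toLowerCase()];
    delete metadata.format;
  }
  return metadata;
}

normalize({ format: 'json' });      // { sourceFormat: 'NEWLINE_DELIMITED_JSON' }
normalize({ format: 'AVRO' });      // { sourceFormat: 'AVRO' }
normalize({ sourceFormat: 'CSV' }); // { sourceFormat: 'CSV' } (round-trips through the map)
// Caveat: an already-expanded enum such as 'NEWLINE_DELIMITED_JSON' has no
// lowercase key in FORMATS, so the lookup yields undefined.
```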
@@ -826,7 +830,7 @@ Table.prototype.import = function(source, metadata, callback) {
      // If no explicit format was provided, attempt to find a match from
      // the file's extension. If no match, don't set, and default upstream
      // to CSV.
-      var format = formats[path.extname(src.name).substr(1).toLowerCase()];
+      var format = FORMATS[path.extname(src.name).substr(1).toLowerCase()];
      if (!metadata.sourceFormat && format) {
        body.configuration.load.sourceFormat = format;
      }
@@ -845,7 +849,7 @@ Table.prototype.import = function(source, metadata, callback) {
      return;
    }

-    var job = that.bigQuery.job(resp.jobReference.jobId);
+    var job = self.bigQuery.job(resp.jobReference.jobId);
    job.metadata = resp;

    callback(null, job, resp);
@@ -1028,7 +1032,7 @@ Table.prototype.query = function(query, callback) {
* table.setMetadata(metadata, function(err, metadata, apiResponse) {});
*/
Table.prototype.setMetadata = function(metadata, callback) {
-  var that = this;
+  var self = this;

  if (metadata.name) {
    metadata.friendlyName = metadata.name;
@@ -1049,7 +1053,7 @@ Table.prototype.setMetadata = function(metadata, callback) {
      return;
    }

-    that.metadata = resp;
+    self.metadata = resp;

    callback(null, resp);
  });