@@ -94,18 +94,9 @@ class ADAMKryoRegistrator extends KryoRegistrator with Logging {

  override def registerClasses(kryo: Kryo) {

-    def registerByName(kryo: Kryo, name: String) {
-      try {
-        kryo.register(Class.forName(name))
-      } catch {
-        case cnfe: java.lang.ClassNotFoundException => {
-          debug("Could not register class %s by name".format(name))
-        }
-      }
-    }
-
    // Register Avro classes using fully qualified class names
    // Sort alphabetically and add blank lines between packages
+    // Classes that require Class.forName are listed below in forNameClasses

    // htsjdk.samtools
    kryo.register(classOf[htsjdk.samtools.CigarElement])
@@ -116,8 +107,6 @@ class ADAMKryoRegistrator extends KryoRegistrator with Logging {
    kryo.register(classOf[htsjdk.samtools.SAMSequenceDictionary])
    kryo.register(classOf[htsjdk.samtools.SAMFileHeader])
    kryo.register(classOf[htsjdk.samtools.SAMSequenceRecord])
-    registerByName(kryo, "htsjdk.samtools.SAMFileHeader$GroupOrder")
-    registerByName(kryo, "htsjdk.samtools.SAMFileHeader$SortOrder")

    // htsjdk.variant.vcf
    kryo.register(classOf[htsjdk.variant.vcf.VCFContigHeaderLine])
@@ -128,7 +117,6 @@ class ADAMKryoRegistrator extends KryoRegistrator with Logging {
    kryo.register(classOf[htsjdk.variant.vcf.VCFHeaderLine])
    kryo.register(classOf[htsjdk.variant.vcf.VCFHeaderLineCount])
    kryo.register(classOf[htsjdk.variant.vcf.VCFHeaderLineType])
-    registerByName(kryo, "htsjdk.variant.vcf.VCFCompoundHeaderLine$SupportedHeaderLineType")

    // java.lang
    kryo.register(classOf[java.lang.Class[_]])
@@ -140,23 +128,6 @@ class ADAMKryoRegistrator extends KryoRegistrator with Logging {
    kryo.register(classOf[java.util.HashMap[_, _]])
    kryo.register(classOf[java.util.HashSet[_]])

-    // org.apache.avro
-    registerByName(kryo, "org.apache.avro.Schema$RecordSchema")
-    registerByName(kryo, "org.apache.avro.Schema$Field")
-    registerByName(kryo, "org.apache.avro.Schema$Field$Order")
-    registerByName(kryo, "org.apache.avro.Schema$UnionSchema")
-    registerByName(kryo, "org.apache.avro.Schema$Type")
-    registerByName(kryo, "org.apache.avro.Schema$LockableArrayList")
-    registerByName(kryo, "org.apache.avro.Schema$BooleanSchema")
-    registerByName(kryo, "org.apache.avro.Schema$NullSchema")
-    registerByName(kryo, "org.apache.avro.Schema$StringSchema")
-    registerByName(kryo, "org.apache.avro.Schema$IntSchema")
-    registerByName(kryo, "org.apache.avro.Schema$FloatSchema")
-    registerByName(kryo, "org.apache.avro.Schema$EnumSchema")
-    registerByName(kryo, "org.apache.avro.Schema$Name")
-    registerByName(kryo, "org.apache.avro.Schema$LongSchema")
-    registerByName(kryo, "org.apache.avro.generic.GenericData$Array")
-
    // org.apache.hadoop.conf
    kryo.register(classOf[org.apache.hadoop.conf.Configuration],
      new WritableSerializer[org.apache.hadoop.conf.Configuration])
@@ -291,23 +262,10 @@ class ADAMKryoRegistrator extends KryoRegistrator with Logging {
    kryo.register(classOf[org.bdgenomics.formats.avro.VariantCallingAnnotations],
      new AvroSerializer[org.bdgenomics.formats.avro.VariantCallingAnnotations])

-    // org.apache.spark.internal
-    registerByName(kryo, "org.apache.spark.internal.io.FileCommitProtocol$TaskCommitMessage")
-
    // org.apache.spark.catalyst
    kryo.register(classOf[org.apache.spark.sql.catalyst.expressions.UnsafeRow])

    // org.apache.spark.sql
-    registerByName(kryo, "org.apache.spark.sql.execution.datasources.FileFormatWriter$WriteTaskResult")
-    registerByName(kryo, "org.apache.spark.sql.execution.datasources.BasicWriteTaskStats")
-    registerByName(kryo, "org.apache.spark.sql.execution.datasources.ExecutedWriteSummary")
-    registerByName(kryo, "org.apache.spark.sql.execution.datasources.WriteTaskResult")
-    registerByName(kryo, "org.apache.spark.sql.types.BooleanType$")
-    registerByName(kryo, "org.apache.spark.sql.types.DoubleType$")
-    registerByName(kryo, "org.apache.spark.sql.types.FloatType$")
-    registerByName(kryo, "org.apache.spark.sql.types.IntegerType$")
-    registerByName(kryo, "org.apache.spark.sql.types.LongType$")
-    registerByName(kryo, "org.apache.spark.sql.types.StringType$")
    kryo.register(classOf[org.apache.spark.sql.types.ArrayType])
    kryo.register(classOf[org.apache.spark.sql.types.MapType])
    kryo.register(classOf[org.apache.spark.sql.types.Metadata])
@@ -354,26 +312,14 @@ class ADAMKryoRegistrator extends KryoRegistrator with Logging {
    kryo.register(classOf[scala.Array[Long]])
    kryo.register(classOf[scala.Array[String]])
    kryo.register(classOf[scala.Array[Option[_]]])
-    registerByName(kryo, "scala.Tuple2$mcCC$sp")
-
-    // scala.collection
-    registerByName(kryo, "scala.collection.Iterator$$anon$11")
-    registerByName(kryo, "scala.collection.Iterator$$anonfun$toStream$1")
-
-    // scala.collection.convert
-    registerByName(kryo, "scala.collection.convert.Wrappers$")

    // scala.collection.immutable
    kryo.register(classOf[scala.collection.immutable.::[_]])
    kryo.register(classOf[scala.collection.immutable.Range])
-    registerByName(kryo, "scala.collection.immutable.Stream$Cons")
-    registerByName(kryo, "scala.collection.immutable.Stream$Empty$")
-    registerByName(kryo, "scala.collection.immutable.Set$EmptySet$")

    // scala.collection.mutable
    kryo.register(classOf[scala.collection.mutable.ArrayBuffer[_]])
    kryo.register(classOf[scala.collection.mutable.ListBuffer[_]])
-    registerByName(kryo, "scala.collection.mutable.ListBuffer$$anon$1")
    kryo.register(classOf[scala.collection.mutable.WrappedArray.ofInt])
    kryo.register(classOf[scala.collection.mutable.WrappedArray.ofLong])
    kryo.register(classOf[scala.collection.mutable.WrappedArray.ofByte])
@@ -383,47 +329,67 @@ class ADAMKryoRegistrator extends KryoRegistrator with Logging {
    // scala.math
    kryo.register(scala.math.Numeric.LongIsIntegral.getClass)

-    // scala.reflect
-    registerByName(kryo, "scala.reflect.ClassTag$GenericClassTag")
-
-    // This seems to be necessary when serializing a RangePartitioner, which writes out a ClassTag:
-    //
-    //   https://github.com/apache/spark/blob/v1.5.2/core/src/main/scala/org/apache/spark/Partitioner.scala#L220
-    //
-    // See also:
-    //
-    //   https://mail-archives.apache.org/mod_mbox/spark-user/201504.mbox/%3CCAC95X6JgXQ3neXF6otj6a+F_MwJ9jbj9P-Ssw3Oqkf518_eT1w@mail.gmail.com%3E
-    registerByName(kryo, "scala.reflect.ClassTag$$anon$1")
-
-    // needed for manifests
-    registerByName(kryo, "scala.reflect.ManifestFactory$ClassTypeManifest")
-
-    // Added to Spark in 1.6.0; needed here for Spark < 1.6.0.
-    kryo.register(classOf[Array[Tuple1[Any]]])
-    kryo.register(classOf[Array[(Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-    kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
-
    kryo.register(Map.empty.getClass)
-    kryo.register(Nil.getClass)
-    kryo.register(None.getClass)
+
+    ADAMKryoRegistrator.forNameClasses.foreach { clazz =>
+      try {
+        kryo.register(clazz)
+      } catch {
+        case _: Throwable => // do nothing
+      }
+    }
+  }
+}
+
+private[serialization] object ADAMKryoRegistrator {
+  private lazy val forNameClasses: Seq[Class[_]] = {
+    Seq(
+      "htsjdk.samtools.SAMFileHeader$GroupOrder",
+      "htsjdk.samtools.SAMFileHeader$SortOrder",
+      "htsjdk.variant.vcf.VCFCompoundHeaderLine$SupportedHeaderLineType",
+      "org.apache.avro.Schema$RecordSchema",
+      "org.apache.avro.Schema$Field",
+      "org.apache.avro.Schema$Field$Order",
+      "org.apache.avro.Schema$UnionSchema",
+      "org.apache.avro.Schema$Type",
+      "org.apache.avro.Schema$LockableArrayList",
+      "org.apache.avro.Schema$BooleanSchema",
+      "org.apache.avro.Schema$NullSchema",
+      "org.apache.avro.Schema$StringSchema",
+      "org.apache.avro.Schema$IntSchema",
+      "org.apache.avro.Schema$FloatSchema",
+      "org.apache.avro.Schema$EnumSchema",
+      "org.apache.avro.Schema$Name",
+      "org.apache.avro.Schema$LongSchema",
+      "org.apache.avro.generic.GenericData$Array",
+      "org.apache.spark.internal.io.FileCommitProtocol$TaskCommitMessage",
+      "org.apache.spark.sql.execution.datasources.FileFormatWriter$WriteTaskResult",
+      "org.apache.spark.sql.execution.datasources.BasicWriteTaskStats",
+      "org.apache.spark.sql.execution.datasources.ExecutedWriteSummary",
+      "org.apache.spark.sql.execution.datasources.WriteTaskResult",
+      "org.apache.spark.sql.types.BooleanType$",
+      "org.apache.spark.sql.types.DoubleType$",
+      "org.apache.spark.sql.types.FloatType$",
+      "org.apache.spark.sql.types.IntegerType$",
+      "org.apache.spark.sql.types.LongType$",
+      "org.apache.spark.sql.types.StringType$",
+      "scala.Tuple2$mcCC$sp",
+      "scala.collection.Iterator$$anon$11",
+      "scala.collection.Iterator$$anonfun$toStream$1",
+      "scala.collection.convert.Wrappers$",
+      "scala.collection.immutable.Stream$Cons",
+      "scala.collection.immutable.Stream$Empty$",
+      "scala.collection.immutable.Set$EmptySet$",
+      "scala.collection.mutable.ListBuffer$$anon$1",
+      "scala.reflect.ClassTag$GenericClassTag",
+      "scala.reflect.ClassTag$$anon$1",
+      "scala.reflect.ManifestFactory$ClassTypeManifest"
+    ).flatMap { name =>
+      try {
+        Some[Class[_]](Class.forName(name))
+      } catch {
+        case _: Throwable => None
+      }
+    }
  }
}
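
For anyone trying the refactored registrator out, the sketch below shows one way to wire it into a Spark application via `SparkConf`. It is a minimal sketch, not part of this patch: the fully qualified registrator name `org.bdgenomics.adam.serialization.ADAMKryoRegistrator` is assumed from the `private[serialization]` qualifier in the diff, and the object name, master setting, and sample job are purely illustrative.

```scala
import org.apache.spark.{ SparkConf, SparkContext }

// Minimal sketch, assuming the registrator's fully qualified name is
// org.bdgenomics.adam.serialization.ADAMKryoRegistrator (inferred from the
// private[serialization] qualifier above; adjust to your actual package).
object KryoRegistratorExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("adam-kryo-registrator-example")
      .setMaster("local[2]")
      // Switch Spark to Kryo and point it at the registrator from this diff.
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.kryo.registrator", "org.bdgenomics.adam.serialization.ADAMKryoRegistrator")
      // Leave registrationRequired off: forNameClasses deliberately skips
      // classes that are absent at runtime, so requiring registration of
      // everything would be stricter than the registrator intends.
      .set("spark.kryo.registrationRequired", "false")

    val sc = new SparkContext(conf)
    try {
      // Any shuffle forces Kryo serialization and exercises the registrations.
      val counts = sc.parallelize(Seq("chr1", "chr2", "chr1"))
        .map(contig => (contig, 1L))
        .reduceByKey(_ + _)
        .collect()
      counts.foreach { case (contig, n) => println(s"$contig -> $n") }
    } finally {
      sc.stop()
    }
  }
}
```

A note on the design choice visible in the diff: collecting the `Class.forName` targets once in a lazy `forNameClasses` sequence means the reflective lookups happen a single time per JVM, while `registerClasses` (which Spark may invoke for every Kryo instance it creates) only iterates an already-resolved list, and classes missing from the classpath are silently skipped rather than treated as errors.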