Skip to content

Commit c423b6e

Browse files
committed
downgrade to Spark 2.1.1, change to version 2.3.2, avoid NullPointerException for optional network topology processing
1 parent f8fb899 commit c423b6e

File tree

12 files changed

+52
-72
lines changed

12 files changed

+52
-72
lines changed

CIMConnector/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
<parent>
99
<groupId>ch.ninecode.cim</groupId>
1010
<artifactId>CIMApplication</artifactId>
11-
<version>2.3.1</version>
11+
<version>2.3.2</version>
1212
<relativePath>../pom.xml</relativePath>
1313
</parent>
1414
<artifactId>CIMConnector</artifactId>

CIMEar/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
<groupId>ch.ninecode.cim</groupId>
1010
<artifactId>CIMApplication</artifactId>
1111
<relativePath>../pom.xml</relativePath>
12-
<version>2.3.1</version>
12+
<version>2.3.2</version>
1313
</parent>
1414
<artifactId>CIMEar</artifactId>
1515
<packaging>ear</packaging>

CIMWeb/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
<parent>
99
<groupId>ch.ninecode.cim</groupId>
1010
<artifactId>CIMApplication</artifactId>
11-
<version>2.3.1</version>
11+
<version>2.3.2</version>
1212
</parent>
1313
<artifactId>CIMWeb</artifactId>
1414
<packaging>war</packaging>

GeoVis/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
<parent>
44
<groupId>ch.ninecode.cim</groupId>
55
<artifactId>CIMApplication</artifactId>
6-
<version>2.3.1</version>
6+
<version>2.3.2</version>
77
</parent>
88
<groupId>ch.ninecode.geo</groupId>
99
<artifactId>GeoVis</artifactId>

GridLAB-D/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
<parent>
66
<artifactId>CIMApplication</artifactId>
77
<groupId>ch.ninecode.cim</groupId>
8-
<version>2.3.1</version>
8+
<version>2.3.2</version>
99
</parent>
1010
<artifactId>GridLAB-D</artifactId>
1111
<groupId>ch.ninecode.gl</groupId>

GridLAB-D/src/main/scala/ch/ninecode/esl/Einspeiseleistung.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -637,13 +637,13 @@ case class Einspeiseleistung (session: SparkSession, options: EinspeiseleistungO
637637
}
638638

639639
// identify topological nodes if necessary
640-
val tns = session.sparkContext.getPersistentRDDs.filter(_._2.name == "TopologicalNode")
641-
if (tns.isEmpty || tns.head._2.isEmpty)
642-
{
640+
// val tns = session.sparkContext.getPersistentRDDs.filter(_._2.name == "TopologicalNode")
641+
// if (tns.isEmpty || tns.head._2.isEmpty)
642+
// {
643643
val ntp = new CIMNetworkTopologyProcessor (session, storage_level)
644644
val ele = ntp.process (false)
645645
log.info (ele.count () + " elements")
646-
}
646+
// }
647647

648648
val topo = System.nanoTime ()
649649
log.info ("topology: " + (topo - read) / 1e9 + " seconds")

GridLAB-D/src/main/scala/ch/ninecode/export/Export.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -212,13 +212,13 @@ case class Export (session: SparkSession, storage_level: StorageLevel, options:
212212
}
213213

214214
// identify topological nodes if necessary
215-
val tns = session.sparkContext.getPersistentRDDs.filter(_._2.name == "TopologicalNode")
216-
if (tns.isEmpty || tns.head._2.isEmpty)
217-
{
215+
// val tns = session.sparkContext.getPersistentRDDs.filter(_._2.name == "TopologicalNode")
216+
// if (tns.isEmpty || tns.head._2.isEmpty)
217+
// {
218218
val ntp = new CIMNetworkTopologyProcessor (session, storage_level)
219219
val ele = ntp.process (false)
220220
log.info (ele.count () + " elements")
221-
}
221+
// }
222222

223223
val topo = System.nanoTime ()
224224
log.info ("topology: " + (topo - read) / 1e9 + " seconds")

GridLAB-D/src/main/scala/ch/ninecode/gl/Transformers.scala

Lines changed: 33 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -6,25 +6,16 @@ import org.apache.spark.rdd.RDD
66
import org.apache.spark.rdd.RDD.rddToPairRDDFunctions
77
import org.apache.spark.sql.SparkSession
88
import org.apache.spark.storage.StorageLevel
9+
import org.slf4j.Logger
910
import org.slf4j.LoggerFactory
1011

12+
import ch.ninecode.cim.CIMRDD
1113
import ch.ninecode.model._
1214

13-
class Transformers (session: SparkSession, storage_level: StorageLevel) extends Serializable
15+
class Transformers (session: SparkSession, storage_level: StorageLevel) extends CIMRDD with Serializable
1416
{
15-
val log = LoggerFactory.getLogger (getClass)
16-
17-
def get (name: String): RDD[Element] =
18-
{
19-
val rdds = session.sparkContext.getPersistentRDDs
20-
for (key <- rdds.keys)
21-
{
22-
val rdd = rdds (key)
23-
if (rdd.name == name)
24-
return (rdd.asInstanceOf[RDD[Element]])
25-
}
26-
return (null)
27-
}
17+
implicit val spark: SparkSession = session
18+
implicit val log: Logger = LoggerFactory.getLogger (getClass)
2819

2920
/**
3021
* Handle a join of power transformers and stations with the power transformer ends.
@@ -33,7 +24,7 @@ class Transformers (session: SparkSession, storage_level: StorageLevel) extends
3324
* @param x - the list of transformers and stations keyed by transformer id, joined with transformer ends
3425
* @return a list of a pair of transformer name and a four-tuple of transformer, station, high side end with voltage and low side end with voltage
3526
*/
36-
def addEnds (voltages: Map[String, Double]) (x: Tuple2[String,Tuple2[Tuple2[PowerTransformer,Substation],Option[Iterable[PowerTransformerEnd]]]]) =
27+
def addEnds (voltages: Map[String, Double]) (x: (String, ((PowerTransformer, Substation), Option[Iterable[PowerTransformerEnd]]))): List[(String, (PowerTransformer, Substation, (PowerTransformerEnd, Double), (PowerTransformerEnd, Double)))] =
3728
{
3829
val ends = x._2._2 match
3930
{
@@ -42,17 +33,17 @@ class Transformers (session: SparkSession, storage_level: StorageLevel) extends
4233
case None =>
4334
Array[PowerTransformerEnd]()
4435
}
45-
if (ends.size > 2)
36+
if (ends.length > 2)
4637
log.warn ("more than two transformer ends for " + x._1)
47-
val ret = if (ends.size == 0)
38+
val ret = if (ends.length == 0)
4839
{
4940
log.error ("no transformer ends for " + x._1)
50-
List[Tuple2[String,Tuple4[PowerTransformer,Substation,Tuple2[PowerTransformerEnd,Double],Tuple2[PowerTransformerEnd,Double]]]]()
41+
List[(String, (PowerTransformer, Substation, (PowerTransformerEnd, Double), (PowerTransformerEnd, Double)))]()
5142
}
52-
else if (ends.size == 1)
43+
else if (ends.length == 1)
5344
{
5445
log.error ("less than two transformer ends for " + x._1)
55-
List[Tuple2[String,Tuple4[PowerTransformer,Substation,Tuple2[PowerTransformerEnd,Double],Tuple2[PowerTransformerEnd,Double]]]]()
46+
List[(String, (PowerTransformer, Substation, (PowerTransformerEnd, Double), (PowerTransformerEnd, Double)))]()
5647
}
5748
else
5849
{
@@ -72,7 +63,7 @@ class Transformers (session: SparkSession, storage_level: StorageLevel) extends
7263
* NOTE: Skips transformers with terminals not matching the transformer ends
7364
* @return a list of four-tuples of transformer, station, and two three-tuples of end, voltage and terminal
7465
*/
75-
def addTerminals (x: Tuple2[String,Tuple2[Tuple4[PowerTransformer, Substation, Tuple2[PowerTransformerEnd,Double], Tuple2[PowerTransformerEnd,Double]],Option[Iterable[Terminal]]]]) =
66+
def addTerminals (x: (String, ((PowerTransformer, Substation, (PowerTransformerEnd, Double), (PowerTransformerEnd, Double)), Option[Iterable[Terminal]]))): List[(PowerTransformer, Substation, (PowerTransformerEnd, Double, Terminal), (PowerTransformerEnd, Double, Terminal))] =
7667
{
7768
val terminals = x._2._2 match
7869
{
@@ -95,28 +86,28 @@ class Transformers (session: SparkSession, storage_level: StorageLevel) extends
9586
List((x._2._1._1, x._2._1._2, (end1._1, end1._2, t1), (end2._1, end2._2, t2)))
9687
case None =>
9788
log.error ("terminal not found for " + end2._1.id)
98-
List[Tuple4[PowerTransformer,Substation,Tuple3[PowerTransformerEnd,Double,Terminal],Tuple3[PowerTransformerEnd,Double,Terminal]]]()
89+
List[(PowerTransformer, Substation, (PowerTransformerEnd, Double, Terminal), (PowerTransformerEnd, Double, Terminal))]()
9990
}
10091
case None =>
10192
match1 match
10293
{
103-
case Some (t1) =>
94+
case Some (_) =>
10495
log.error ("terminal not found for " + end1._1.id)
105-
List[Tuple4[PowerTransformer,Substation,Tuple3[PowerTransformerEnd,Double,Terminal],Tuple3[PowerTransformerEnd,Double,Terminal]]]()
96+
List[(PowerTransformer, Substation, (PowerTransformerEnd, Double, Terminal), (PowerTransformerEnd, Double, Terminal))]()
10697
case None =>
10798
log.error ("no terminals not found for " + x._2._1._1.id)
108-
List[Tuple4[PowerTransformer,Substation,Tuple3[PowerTransformerEnd,Double,Terminal],Tuple3[PowerTransformerEnd,Double,Terminal]]]()
99+
List[(PowerTransformer, Substation, (PowerTransformerEnd, Double, Terminal), (PowerTransformerEnd, Double, Terminal))]()
109100
}
110101
}
111102
ret
112103
}
113104

114-
def addSC (arg: ((PowerTransformer, Substation, (PowerTransformerEnd, Double, Terminal), (PowerTransformerEnd, Double, Terminal)), Option[ShortCircuitData])) =
105+
def addSC (arg: ((PowerTransformer, Substation, (PowerTransformerEnd, Double, Terminal), (PowerTransformerEnd, Double, Terminal)), Option[ShortCircuitData])): (PowerTransformer, Substation, (PowerTransformerEnd, Double, Terminal), (PowerTransformerEnd, Double, Terminal), ShortCircuitData) =
115106
{
116107
arg._2 match
117108
{
118109
case Some (sc) => (arg._1._1, arg._1._2, arg._1._3, arg._1._4, sc)
119-
case None => (arg._1._1, arg._1._2, arg._1._3, arg._1._4, ShortCircuitData (arg._1._2.id, 200, -70, false))
110+
case None => (arg._1._1, arg._1._2, arg._1._3, arg._1._4, ShortCircuitData (arg._1._2.id, 200, -70, valid = false))
120111
}
121112
}
122113

@@ -133,54 +124,42 @@ class Transformers (session: SparkSession, storage_level: StorageLevel) extends
133124
}
134125

135126
// get all transformers in substations
136-
val transformers = get ("PowerTransformer").asInstanceOf[RDD[PowerTransformer]]
137-
val substation_transformers = transformers.filter ((t: PowerTransformer) => { (t.ConductingEquipment.Equipment.PowerSystemResource.IdentifiedObject.name != "Messen_Steuern") })
127+
val transformers = get[PowerTransformer]
128+
val substation_transformers = transformers.filter ((t: PowerTransformer) => t.ConductingEquipment.Equipment.PowerSystemResource.IdentifiedObject.name != "Messen_Steuern")
138129

139130
// get an RDD of substations by filtering out distribution boxes
140-
val stations = get ("Substation").asInstanceOf[RDD[Substation]].filter (_.EquipmentContainer.ConnectivityNodeContainer.PowerSystemResource.PSRType != "PSRType_DistributionBox")
131+
val stations = get[Substation].filter (_.EquipmentContainer.ConnectivityNodeContainer.PowerSystemResource.PSRType != "PSRType_DistributionBox")
141132

142133
// the equipment container for a transformer could be a Bay, VoltageLevel or Station... the first two of which have a reference to their station
143-
def station_fn (x: Tuple2[String, Any]) =
134+
def station_fn (x: (String, Any)) =
144135
{
145136
x match
146137
{
147-
case (key: String, (t: PowerTransformer, station: Substation)) =>
148-
{
149-
(station.id, t)
150-
}
151-
case (key: String, (t: PowerTransformer, bay: Bay)) =>
152-
{
153-
(bay.Substation, t)
154-
}
155-
case (key: String, (t: PowerTransformer, level: VoltageLevel)) =>
156-
{
157-
(level.Substation, t)
158-
}
159-
case _ =>
160-
{
161-
throw new Exception ("this should never happen -- default case")
162-
}
138+
case (_, (t: PowerTransformer, station: Substation)) => (station.id, t)
139+
case (_, (t: PowerTransformer, bay: Bay)) => (bay.Substation, t)
140+
case (_, (t: PowerTransformer, level: VoltageLevel)) => (level.Substation, t)
141+
case _ => throw new Exception ("this should never happen -- default case")
163142
}
164143
}
165144

166145
// create an RDD of container-transformer pairs, e.g. { (KAB8526,TRA13730), (STA4551,TRA4425), ... }
167-
val elements = get ("Elements").asInstanceOf[RDD[Element]]
146+
val elements = get[Element]("Elements")
168147
val tpairs = substation_transformers.keyBy(_.ConductingEquipment.Equipment.EquipmentContainer).join (elements.keyBy (_.id)).map (station_fn)
169148

170149
// only keep the pairs where the transformer is in a substation we have
171150
val transformers_stations = tpairs.join (stations.keyBy (_.id)).values
172151

173152
// get the transformer ends keyed by transformer
174-
val ends = get ("PowerTransformerEnd").asInstanceOf[RDD[PowerTransformerEnd]].groupBy (_.PowerTransformer)
153+
val ends = get[PowerTransformerEnd].groupBy (_.PowerTransformer)
175154

176155
// get a map of voltages
177-
val voltages = get ("BaseVoltage").asInstanceOf[RDD[BaseVoltage]].map ((v) => (v.id, v.nominalVoltage)).collectAsMap ()
156+
val voltages = get[BaseVoltage].map ((v) => (v.id, v.nominalVoltage)).collectAsMap ()
178157

179158
// attach PowerTransformerEnd elements
180159
val transformers_stations_plus_ends = transformers_stations.keyBy (_._1.id).leftOuterJoin (ends).flatMap (addEnds (voltages))
181160

182161
// get the terminals keyed by transformer
183-
val terms = get ("Terminal").asInstanceOf[RDD[Terminal]].groupBy (_.ConductingEquipment)
162+
val terms = get[Terminal].groupBy (_.ConductingEquipment)
184163

185164
// attach Terminal elements
186165
val transformers_stations_plus_ends_plus_terminals = transformers_stations_plus_ends.leftOuterJoin (terms).flatMap (addTerminals)
@@ -205,10 +184,11 @@ class Transformers (session: SparkSession, storage_level: StorageLevel) extends
205184
transformer_data.persist (storage_level)
206185
session.sparkContext.getCheckpointDir match
207186
{
208-
case Some (dir) => transformer_data.checkpoint ()
187+
case Some (_) => transformer_data.checkpoint ()
209188
case None =>
210189
}
211190

212-
return (transformer_data)
191+
transformer_data
213192
}
214193
}
194+

ShortCircuit/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
<parent>
66
<artifactId>CIMApplication</artifactId>
77
<groupId>ch.ninecode.cim</groupId>
8-
<version>2.3.1</version>
8+
<version>2.3.2</version>
99
</parent>
1010
<artifactId>ShortCircuit</artifactId>
1111
<groupId>ch.ninecode.sc</groupId>

SmartMeter/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
<parent>
66
<artifactId>CIMApplication</artifactId>
77
<groupId>ch.ninecode.cim</groupId>
8-
<version>2.3.1</version>
8+
<version>2.3.2</version>
99
</parent>
1010
<artifactId>SmartMeter</artifactId>
1111
<groupId>ch.ninecode.sm</groupId>

Spatial/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
<parent>
44
<groupId>ch.ninecode.cim</groupId>
55
<artifactId>CIMApplication</artifactId>
6-
<version>2.3.1</version>
6+
<version>2.3.2</version>
77
</parent>
88
<groupId>ch.ninecode.sp</groupId>
99
<artifactId>Spatial</artifactId>

pom.xml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
<modelVersion>4.0.0</modelVersion>
44
<groupId>ch.ninecode.cim</groupId>
55
<artifactId>CIMApplication</artifactId>
6-
<version>2.3.1</version>
6+
<version>2.3.2</version>
77
<packaging>pom</packaging>
88
<name>CIM Application Multimodule</name>
99

@@ -22,8 +22,8 @@
2222

2323
<!-- Spark versions -->
2424
<version.dependency.hadoop>2.7.3</version.dependency.hadoop>
25-
<version.dependency.spark>2.2.0</version.dependency.spark>
26-
<version.dependency.cimreader>2.11-2.2.0-2.2.0</version.dependency.cimreader>
25+
<version.dependency.spark>2.1.1</version.dependency.spark>
26+
<version.dependency.cimreader>2.11-2.1.1-2.2.0</version.dependency.cimreader>
2727

2828
<version.dependency.junit>4.12</version.dependency.junit>
2929

0 commit comments

Comments (0)