Skip to content

Commit 4f90248

Browse files
bbrehm and mpollmeier
authored
format everything (#364)
* format everything * upgrade to latest scalafmt (just because) * enforce scalafmtCheck on PR --------- Co-authored-by: Michael Pollmeier <michael@michaelpollmeier.com>
1 parent 2b60221 commit 4f90248

File tree

71 files changed

+1395
-1113
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

71 files changed

+1395
-1113
lines changed

.github/workflows/pr.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,4 +18,4 @@ jobs:
1818
~/.sbt
1919
~/.coursier
2020
key: ${{ runner.os }}-sbt-${{ hashfiles('**/build.sbt') }}
21-
- run: sbt +test
21+
- run: sbt scalafmtCheck Test/scalafmtCheck +test

.scalafmt.conf

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,4 @@
1-
maxColumn = 100
2-
rewrite.rules = [AvoidInfix, SortImports]
1+
runner.dialect = scala213
2+
version=3.7.3
3+
maxColumn = 120
4+
rewrite.rules = [AvoidInfix]

formats/src/main/scala/overflowdb/formats/Exporter.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@ import java.nio.file.{Path, Paths}
44
import overflowdb.{Edge, Graph, Node}
55
import scala.jdk.CollectionConverters.IteratorHasAsScala
66

7-
87
trait Exporter {
98

109
def defaultFileExtension: String
@@ -18,4 +17,4 @@ trait Exporter {
1817
runExport(graph, Paths.get(outputFile))
1918
}
2019

21-
case class ExportResult(nodeCount: Int, edgeCount: Int, files: Seq[Path], additionalInfo: Option[String])
20+
case class ExportResult(nodeCount: Int, edgeCount: Int, files: Seq[Path], additionalInfo: Option[String])

formats/src/main/scala/overflowdb/formats/ExporterMain.scala

Lines changed: 33 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -13,44 +13,42 @@ import java.nio.file.{Files, Path, Paths}
1313
import scala.jdk.CollectionConverters.SeqHasAsJava
1414
import scala.util.Using
1515

16-
/**
17-
* Base functionality to export a given OverflowDB graph to various export formats.
18-
* Because ODB relies on domain specific implementations, specifically the NodeFactories and EdgeFactories from the
19-
* domain-specific generated classes (typically generated by by https://github.com/ShiftLeftSecurity/overflowdb-codegen)
20-
* need to be passed in.
21-
*/
16+
/** Base functionality to export a given OverflowDB graph to various export formats. Because ODB relies on domain
17+
* specific implementations, specifically the NodeFactories and EdgeFactories from the domain-specific generated
18+
* classes (typically generated by by https://github.com/ShiftLeftSecurity/overflowdb-codegen) need to be passed in.
19+
*/
2220
object ExporterMain {
2321
lazy val logger = LoggerFactory.getLogger(getClass)
2422

25-
def apply(nodeFactories: Seq[NodeFactory[_]], edgeFactories: Seq[EdgeFactory[_]]): Array[String] => Unit = {
26-
args =>
27-
OParser.parse(parser, args, Config(Paths.get("/dev/null"), null, Paths.get("/dev/null")))
28-
.map { case Config(inputFile, format, outputFile) =>
29-
if (Files.notExists(inputFile))
30-
throw new AssertionError(s"given input file $inputFile does not exist")
31-
if (Files.exists(outputFile)) {
32-
if (Files.isRegularFile(outputFile))
33-
throw new AssertionError(s"output file $outputFile already exists and is not a directory")
34-
} else {
35-
Files.createDirectories(outputFile)
36-
}
23+
def apply(nodeFactories: Seq[NodeFactory[_]], edgeFactories: Seq[EdgeFactory[_]]): Array[String] => Unit = { args =>
24+
OParser
25+
.parse(parser, args, Config(Paths.get("/dev/null"), null, Paths.get("/dev/null")))
26+
.map { case Config(inputFile, format, outputFile) =>
27+
if (Files.notExists(inputFile))
28+
throw new AssertionError(s"given input file $inputFile does not exist")
29+
if (Files.exists(outputFile)) {
30+
if (Files.isRegularFile(outputFile))
31+
throw new AssertionError(s"output file $outputFile already exists and is not a directory")
32+
} else {
33+
Files.createDirectories(outputFile)
34+
}
3735

38-
val exporter: Exporter = format match {
39-
case Format.Neo4jCsv => Neo4jCsvExporter
40-
case Format.GraphML => GraphMLExporter
41-
case Format.GraphSON => GraphSONExporter
42-
case Format.Dot => DotExporter
43-
}
44-
val odbConfig = overflowdb.Config.withoutOverflow.withStorageLocation(inputFile)
45-
logger.info(s"starting export of graph in $inputFile to storagePath=$outputFile in format=$format")
46-
val ExportResult(nodeCount, edgeCount, files, additionalInfo) =
47-
Using.resource(Graph.open(odbConfig, nodeFactories.asJava, edgeFactories.asJava)) { graph =>
48-
exporter.runExport(graph, outputFile)
49-
}
50-
logger.info(s"export completed successfully: $nodeCount nodes, $edgeCount edges in ${files.size} files")
51-
additionalInfo.foreach(logger.info)
36+
val exporter: Exporter = format match {
37+
case Format.Neo4jCsv => Neo4jCsvExporter
38+
case Format.GraphML => GraphMLExporter
39+
case Format.GraphSON => GraphSONExporter
40+
case Format.Dot => DotExporter
5241
}
53-
}
42+
val odbConfig = overflowdb.Config.withoutOverflow.withStorageLocation(inputFile)
43+
logger.info(s"starting export of graph in $inputFile to storagePath=$outputFile in format=$format")
44+
val ExportResult(nodeCount, edgeCount, files, additionalInfo) =
45+
Using.resource(Graph.open(odbConfig, nodeFactories.asJava, edgeFactories.asJava)) { graph =>
46+
exporter.runExport(graph, outputFile)
47+
}
48+
logger.info(s"export completed successfully: $nodeCount nodes, $edgeCount edges in ${files.size} files")
49+
additionalInfo.foreach(logger.info)
50+
}
51+
}
5452

5553
private lazy val builder = OParser.builder[Config]
5654
private lazy val parser = {
@@ -62,14 +60,14 @@ object ExporterMain {
6260
.required()
6361
.action((x, c) => c.copy(format = Format.byNameLowercase(x)))
6462
.text(s"export format, one of [${Format.valuesAsStringLowercase.mkString("|")}]"),
65-
opt[File]('o', "out") // will be able to read a `Path` with scopt 4.0.2+ (once released)
63+
opt[File]('o', "out") // will be able to read a `Path` with scopt 4.0.2+ (once released)
6664
.required()
6765
.action((x, c) => c.copy(outputFile = x.toPath))
6866
.text("output file or directory - must exist and be writable"),
6967
arg[File]("odbBinaryFile")
7068
.required()
7169
.action((x, c) => c.copy(inputFile = x.toPath))
72-
.text("input overflowdb graph file - must exist and be readable"),
70+
.text("input overflowdb graph file - must exist and be readable")
7371
)
7472
}
7573

formats/src/main/scala/overflowdb/formats/ImporterMain.scala

Lines changed: 39 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -12,44 +12,49 @@ import java.nio.file.{Files, Path, Paths}
1212
import scala.jdk.CollectionConverters.SeqHasAsJava
1313
import scala.util.Using
1414

15-
/**
16-
* Base functionality import a given list of input file(s) of various formats into an OverflowDB binary.
17-
* Because ODB relies on domain specific implementations, specifically the NodeFactories and EdgeFactories from the
18-
* domain-specific generated classes (typically generated by by https://github.com/ShiftLeftSecurity/overflowdb-codegen)
19-
* need to be passed in.
20-
*/
15+
/** Base functionality import a given list of input file(s) of various formats into an OverflowDB binary. Because ODB
16+
* relies on domain specific implementations, specifically the NodeFactories and EdgeFactories from the domain-specific
17+
* generated classes (typically generated by by https://github.com/ShiftLeftSecurity/overflowdb-codegen) need to be
18+
* passed in.
19+
*/
2120
object ImporterMain extends App {
2221
lazy val logger = LoggerFactory.getLogger(getClass)
2322

24-
def apply(nodeFactories: Seq[NodeFactory[_]],
25-
edgeFactories: Seq[EdgeFactory[_]],
26-
convertPropertyForPersistence: Any => Any = identity): Array[String] => Unit = {
27-
args =>
28-
OParser.parse(parser, args, Config(Nil, null, Paths.get("/dev/null")))
29-
.map { case Config(inputFiles, format, outputFile) =>
30-
val nonExistent = inputFiles.filterNot(Files.exists(_))
31-
if (nonExistent.nonEmpty)
32-
throw new AssertionError(s"given input files $nonExistent do not exist")
23+
def apply(
24+
nodeFactories: Seq[NodeFactory[_]],
25+
edgeFactories: Seq[EdgeFactory[_]],
26+
convertPropertyForPersistence: Any => Any = identity
27+
): Array[String] => Unit = { args =>
28+
OParser
29+
.parse(parser, args, Config(Nil, null, Paths.get("/dev/null")))
30+
.map { case Config(inputFiles, format, outputFile) =>
31+
val nonExistent = inputFiles.filterNot(Files.exists(_))
32+
if (nonExistent.nonEmpty)
33+
throw new AssertionError(s"given input files $nonExistent do not exist")
3334

34-
Files.deleteIfExists(outputFile)
35+
Files.deleteIfExists(outputFile)
3536

36-
val importer: Importer = format match {
37-
case Format.Neo4jCsv => Neo4jCsvImporter
38-
case Format.GraphML => GraphMLImporter
39-
case Format.GraphSON => GraphSONImporter
40-
}
41-
val odbConfig = overflowdb.Config.withoutOverflow.withStorageLocation(outputFile)
42-
Using.resource(
43-
Graph.open(
44-
odbConfig,
45-
nodeFactories.asJava,
46-
edgeFactories.asJava,
47-
convertPropertyForPersistence(_).asInstanceOf[Object])) { graph =>
48-
logger.info(s"starting import of ${inputFiles.size} files in format=$format into a new overflowdb instance with storagePath=$outputFile")
49-
importer.runImport(graph, inputFiles)
50-
logger.info(s"import completed successfully")
51-
}
37+
val importer: Importer = format match {
38+
case Format.Neo4jCsv => Neo4jCsvImporter
39+
case Format.GraphML => GraphMLImporter
40+
case Format.GraphSON => GraphSONImporter
5241
}
42+
val odbConfig = overflowdb.Config.withoutOverflow.withStorageLocation(outputFile)
43+
Using.resource(
44+
Graph.open(
45+
odbConfig,
46+
nodeFactories.asJava,
47+
edgeFactories.asJava,
48+
convertPropertyForPersistence(_).asInstanceOf[Object]
49+
)
50+
) { graph =>
51+
logger.info(
52+
s"starting import of ${inputFiles.size} files in format=$format into a new overflowdb instance with storagePath=$outputFile"
53+
)
54+
importer.runImport(graph, inputFiles)
55+
logger.info(s"import completed successfully")
56+
}
57+
}
5358
}
5459

5560
private lazy val builder = OParser.builder[Config]
@@ -61,7 +66,7 @@ object ImporterMain extends App {
6166
opt[String]('f', "format")
6267
.required()
6368
.action((x, c) => c.copy(format = Format.byNameLowercase(x)))
64-
.text(s"import format, one of [${Format.valuesAsStringLowercase.mkString("|")}]"),
69+
.text(s"import format, one of [${Format.valuesAsStringLowercase.mkString("|")}]"),
6570
opt[File]('o', "out") // will be able to read a `Path` with scopt 4.0.2+ (once released)
6671
.required()
6772
.action((x, c) => c.copy(outputFile = x.toPath))
@@ -70,7 +75,7 @@ object ImporterMain extends App {
7075
.required()
7176
.unbounded()
7277
.action((x, c) => c.copy(inputFiles = c.inputFiles :+ x.toPath))
73-
.text("input files - must exist and be readable"),
78+
.text("input files - must exist and be readable")
7479
)
7580
}
7681

formats/src/main/scala/overflowdb/formats/dot/DotExporter.scala

Lines changed: 13 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -7,17 +7,14 @@ import java.nio.file.{Files, Path}
77
import scala.jdk.CollectionConverters.MapHasAsScala
88
import scala.util.Using
99

10-
/**
11-
* Exports OverflowDB Graph to graphviz dot/gv file
12-
*
13-
* Note: GraphML doesn't natively support list property types, so we fake it by encoding it as a `;` delimited string.
14-
* If you import this into a different database, you'll need to parse that separately.
15-
*
16-
* https://en.wikipedia.org/wiki/DOT_(graph_description_language)
17-
* https://www.graphviz.org/doc/info/lang.html
18-
* http://magjac.com/graphviz-visual-editor/
19-
* https://www.slideshare.net/albazo/graphiz-using-the-dot-language
20-
* */
10+
/** Exports OverflowDB Graph to graphviz dot/gv file
11+
*
12+
* Note: GraphML doesn't natively support list property types, so we fake it by encoding it as a `;` delimited string.
13+
* If you import this into a different database, you'll need to parse that separately.
14+
*
15+
* https://en.wikipedia.org/wiki/DOT_(graph_description_language) https://www.graphviz.org/doc/info/lang.html
16+
* http://magjac.com/graphviz-visual-editor/ https://www.slideshare.net/albazo/graphiz-using-the-dot-language
17+
*/
2118
object DotExporter extends Exporter {
2219
override def defaultFileExtension = "dot"
2320

@@ -63,9 +60,11 @@ object DotExporter extends Exporter {
6360
}
6461

6562
private def properties2Dot(properties: java.util.Map[String, Object]): String = {
66-
properties.asScala.map { case (key, value) =>
67-
s"$key=${encodePropertyValue(value)}"
68-
}.mkString(" ")
63+
properties.asScala
64+
.map { case (key, value) =>
65+
s"$key=${encodePropertyValue(value)}"
66+
}
67+
.mkString(" ")
6968
}
7069

7170
private def encodePropertyValue(value: Object): String = {

formats/src/main/scala/overflowdb/formats/graphml/GraphMLExporter.scala

Lines changed: 37 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -10,16 +10,14 @@ import scala.collection.mutable
1010
import scala.jdk.CollectionConverters.MapHasAsScala
1111
import scala.xml.{PrettyPrinter, XML}
1212

13-
/**
14-
* Exports OverflowDB Graph to GraphML
15-
*
16-
* Warning: list properties are not natively supported by graphml...
17-
* We initially built some support for those which deviated from the spec, but given that other tools don't support
18-
* it, some refusing to import the remainder, we've dropped it. Now, lists are serialised to `;`-separated strings.
19-
*
20-
* https://en.wikipedia.org/wiki/GraphML
21-
* http://graphml.graphdrawing.org/primer/graphml-primer.html
22-
* */
13+
/** Exports OverflowDB Graph to GraphML
14+
*
15+
* Warning: list properties are not natively supported by graphml... We initially built some support for those which
16+
* deviated from the spec, but given that other tools don't support it, some refusing to import the remainder, we've
17+
* dropped it. Now, lists are serialised to `;`-separated strings.
18+
*
19+
* https://en.wikipedia.org/wiki/GraphML http://graphml.graphdrawing.org/primer/graphml-primer.html
20+
*/
2321
object GraphMLExporter extends Exporter {
2422

2523
override def defaultFileExtension = "xml"
@@ -47,9 +45,11 @@ object GraphMLExporter extends Exporter {
4745
}.toSeq
4846

4947
def propertyKeyXml(forAttr: String, propsMap: mutable.Map[String, PropertyContext]): String = {
50-
propsMap.map { case (key, PropertyContext(name, tpe)) =>
51-
s"""<key id="$key" for="$forAttr" attr.name="$name" attr.type="$tpe"></key>"""
52-
}.mkString(lineSeparator)
48+
propsMap
49+
.map { case (key, PropertyContext(name, tpe)) =>
50+
s"""<key id="$key" for="$forAttr" attr.name="$name" attr.type="$tpe"></key>"""
51+
}
52+
.mkString(lineSeparator)
5353
}
5454
val nodePropertyKeyEntries = propertyKeyXml("node", nodePropertyContextById)
5555
val edgePropertyKeyEntries = propertyKeyXml("edge", edgePropertyContextById)
@@ -87,30 +87,34 @@ object GraphMLExporter extends Exporter {
8787
)
8888
}
8989

90-
/**
91-
* warning: updates type information based on runtime instances (in mutable.Map `propertyTypeByName`)
92-
* warning2: updated the `discardedListPropertyCount` counter - if we need to discard any list properties, display a warning to the user
93-
*/
94-
private def dataEntries(prefix: String,
95-
element: Element,
96-
propertyContextById: mutable.Map[String, PropertyContext],
97-
discardedListPropertyCount: AtomicInteger): String = {
98-
element.propertiesMap.asScala.map { case (propertyName, propertyValue) =>
99-
if (isList(propertyValue.getClass)) {
100-
discardedListPropertyCount.incrementAndGet()
101-
"" // discard list properties
102-
} else { // scalar value
103-
val encodedPropertyName = s"${prefix}__${element.label}__$propertyName"
104-
val graphMLTpe = Type.fromRuntimeClass(propertyValue.getClass)
90+
/** warning: updates type information based on runtime instances (in mutable.Map `propertyTypeByName`) warning2:
91+
* updated the `discardedListPropertyCount` counter - if we need to discard any list properties, display a warning to
92+
* the user
93+
*/
94+
private def dataEntries(
95+
prefix: String,
96+
element: Element,
97+
propertyContextById: mutable.Map[String, PropertyContext],
98+
discardedListPropertyCount: AtomicInteger
99+
): String = {
100+
element.propertiesMap.asScala
101+
.map { case (propertyName, propertyValue) =>
102+
if (isList(propertyValue.getClass)) {
103+
discardedListPropertyCount.incrementAndGet()
104+
"" // discard list properties
105+
} else { // scalar value
106+
val encodedPropertyName = s"${prefix}__${element.label}__$propertyName"
107+
val graphMLTpe = Type.fromRuntimeClass(propertyValue.getClass)
105108

106-
/* update type information based on runtime instances */
109+
/* update type information based on runtime instances */
107110
if (!propertyContextById.contains(encodedPropertyName)) {
108111
propertyContextById.update(encodedPropertyName, PropertyContext(propertyName, graphMLTpe))
109112
}
110-
val xmlEncoded = xml.Utility.escape(propertyValue.toString)
111-
s"""<data key="$encodedPropertyName">$xmlEncoded</data>"""
113+
val xmlEncoded = xml.Utility.escape(propertyValue.toString)
114+
s"""<data key="$encodedPropertyName">$xmlEncoded</data>"""
115+
}
112116
}
113-
}.mkString(lineSeparator)
117+
.mkString(lineSeparator)
114118
}
115119

116120
private def xmlFormatInPlace(xmlFile: Path): Unit = {
@@ -120,4 +124,4 @@ object GraphMLExporter extends Exporter {
120124
writeFile(xmlFile, formatted)
121125
}
122126

123-
}
127+
}

0 commit comments

Comments (0)