From 47a89f67f4acdbd55fd1cbfb704211937962e522 Mon Sep 17 00:00:00 2001
From: Federico Igne
Date: Wed, 3 Nov 2021 09:54:02 +0000
Subject: Avoid generating a JSON string before writing to file

This solves a crash when considering a huge number of queries.
---
 project/Dependencies.scala                         |  2 +-
 src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala      | 11 +++---
 src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala |  2 -
 .../scala/uk/ac/ox/cs/rsacomb/util/Logger.scala    | 45 +++++++++++++++++-----
 4 files changed, 41 insertions(+), 19 deletions(-)

diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index 336f052..8341b8d 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -5,7 +5,7 @@ object Dependencies {
   lazy val scalatestVersion = "3.2.3"
   lazy val owlapiVersion = "5.1.17"
   lazy val scalagraphVersion = "1.13.2"
-  lazy val ujsonVersion = "1.4.1"
+  lazy val ujsonVersion = "1.4.2"
   lazy val oslibVersion = "0.7.8"
 
   // Libraries
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
index 7eb39e8..fe88b4f 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
@@ -41,6 +41,10 @@ object RSAComb extends App {
   if (config.contains('logger))
     Logger.level = config('logger).get[Logger.Level]
 
+  /* Set answers output file */
+  if (config.contains('answers))
+    Logger.answers = config('answers).get[os.Path]
+
   /* Load original ontology and normalize it */
   val ontopath = config('ontology).get[os.Path]
   val data = config('data).get[List[os.Path]]
@@ -61,12 +65,7 @@
   val answers = rsa ask queries
 
   /* Write answers to output file */
-  os.write(
-    config('answers).get[os.Path],
-    ujson.write(ujson.Arr(answers.map(_.toJSON)), indent = 2),
-    createFolders = true
-  )
-
+  Logger write answers
   /* Generate simulation script */
   Logger.generateSimulationScripts(data, queries)
 }
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala
index f3039b3..4d96850 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala
@@ -149,8 +149,6 @@
       exit("The following flag is mandatory: '-o' or '--ontology'.")
     if (!config.contains('data))
       config += ('data -> List.empty[os.Path])
-    if (!config.contains('answers))
-      config += ('answers -> Logger.dir / "answers.json")
     config
   }
 }
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
index f3b0232..20fd4c3 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
@@ -19,23 +19,25 @@ package uk.ac.ox.cs.rsacomb.util
 import java.util.Calendar
 import java.text.SimpleDateFormat
 import java.io.PrintStream
-import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery
+import uk.ac.ox.cs.rsacomb.sparql.{ConjunctiveQuery, ConjunctiveQueryAnswers}
 
 /** Simple logger */
 object Logger {
 
-  lazy val dir = {
+  /** Main directory for logger output for the current run */
+  val dir = {
     val timestamp = (new SimpleDateFormat("yyyyMMddHHmmss")).format(
       Calendar.getInstance().getTime
     )
-    val dir = os.pwd / s"rsacomb-$timestamp"
-    os.makeDir(dir)
-    dir
+    os.pwd / s"rsacomb-$timestamp"
   }
 
   /** Output stream for the logger. */
   var output: PrintStream = System.out
 
+  /** Path to answers output file */
+  var answers: os.Path = dir / "answers.json"
+
   /** Logger levels (i.e., verbosity of output) */
   sealed abstract class Level(val level: Int, val name: String)
       extends Ordered[Level] {
@@ -67,8 +69,29 @@
     */
   def write(content: => os.Source, file: String, lvl: Level = VERBOSE): Unit =
     if (lvl <= level)
-      os.write.append(dir / file, content)
+      os.write.append(dir / file, content, createFolders = true)
+
+  /** Write answers to queries to output file in JSON format.
+    *
+    * @param ans the set of answers to be written.
+    */
+  def write(ans: Seq[ConjunctiveQueryAnswers]): Unit = {
+    ujson.writeToOutputStream(
+      ujson.Arr(ans.map(_.toJSON)),
+      os.write.outputStream(answers, createFolders = true),
+      indent = 2
+    )
+  }
 
+  /** Timed evaluation of an expression.
+    *
+    * Records and outputs additional information about evaluation time.
+    *
+    * @param expr expression to be evaluated.
+    * @param desc short description of the expression.
+    * @param lvl minimum require logger level for output
+    * @return the result of the evaluation.
+    */
   def timed[A](expr: => A, desc: String = "", lvl: Level = NORMAL): A = {
     val t0 = System.currentTimeMillis()
     print(s"$desc (START)", lvl)
@@ -92,7 +115,6 @@
     if (lvl <= level) {
       /* Create script folder */
       val sim = os.rel / 'sim
-      os.makeDir(dir / sim)
       /* Generate main script */
       os.write.append(
         dir / "simulate.rdfox",
@@ -121,14 +143,16 @@
 echo "\\n[Load canonical model program]"
 import "canonical_model.dlog"
 exec "$sim/filter_query_$$(1).rdfox"
-"""
+""",
+        createFolders = true
       )
       /* Generate query scripts */
       queries.map(q => {
        val id = q.id
        os.write.append(
          dir / sim / "filter_query_all.rdfox",
-         s"exec $sim/filter_query_$id.rdfox\n"
+         s"exec $sim/filter_query_$id.rdfox\n",
+         createFolders = true
        )
        os.write.append(
          dir / sim / s"filter_query_$id.rdfox",
          s"""
echo "\\n[Load filtering program for query $id]"
tupletable create rsacomb:Filter$id type "named-graph"
import "filter_query_$id.dlog"
-"""
+""",
+         createFolders = true
        )
      })
    }
-- 
cgit v1.2.3
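
Note (not part of the patch): the crash described in the commit message comes from materialising the whole JSON document as a String with ujson.write before anything reaches the disk, whereas ujson.writeToOutputStream serialises directly into the stream returned by os.write.outputStream, so memory use no longer grows with the size of the rendered JSON. The sketch below is a minimal, self-contained illustration of that difference under ujson 1.4.x and os-lib; the object name, file names and payload size are illustrative and do not appear in the RSAComb sources.

    // Standalone sketch, not from the RSAComb code base: names, paths and the
    // synthetic payload below are illustrative only.
    object StreamingJsonSketch extends App {

      // Synthetic stand-in for a large collection of query answers.
      val records: Seq[ujson.Value] =
        (1 to 100000).map(i => ujson.Obj("query" -> i, "answers" -> ujson.Arr()))
      val doc = ujson.Arr(records: _*)

      val dir = os.temp.dir()

      // Pre-patch approach: render the whole document to one String first,
      // so peak memory grows with the number of answers.
      os.write(dir / "answers-string.json", ujson.write(doc, indent = 2))

      // Post-patch approach: serialise straight into the file's output stream,
      // never holding the full JSON text in memory (the sketch also closes the
      // stream explicitly, which the patch itself does not do).
      val out = os.write.outputStream(dir / "answers-streamed.json", createFolders = true)
      try ujson.writeToOutputStream(doc, out, indent = 2)
      finally out.close()
    }

The rest of the diff follows the same lazy principle: the eager os.makeDir calls are dropped and every write now passes createFolders = true, so the output directory is only created once something is actually written to it.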