Diffstat (limited to 'src')
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala         21
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala      2
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala    5
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala   79
4 files changed, 91 insertions, 16 deletions
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
index 302db75..fe7a6db 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
@@ -42,10 +42,9 @@ object RSAComb extends App {
   Logger.level = config('logger).get[Logger.Level]
 
   /* Load original ontology and normalize it */
-  val ontology = Ontology(
-    config('ontology).get[os.Path],
-    config('data).get[List[os.Path]]
-  ).normalize(new Normalizer)
+  val ontopath = config('ontology).get[os.Path]
+  val data = config('data).get[List[os.Path]]
+  val ontology = Ontology(ontopath, data).normalize(new Normalizer)
 
   //ontology.axioms foreach println
 
@@ -62,12 +61,14 @@ object RSAComb extends App {
   val answers = rsa ask queries
 
   /* Write answers to output file */
-  if (config.contains('answers))
-    os.write(
-      config('answers).get[os.Path],
-      ujson.write(ujson.Arr(answers.map(_.toJSON)), indent = 2),
-      createFolders = true
-    )
+  os.write(
+    config('answers).get[os.Path],
+    ujson.write(ujson.Arr(answers.map(_.toJSON)), indent = 2),
+    createFolders = true
+  )
+
+  /* Generate simulation script */
+  Logger.generateSimulationScripts(data, queries)
 
   // Logger.print(s"$answers", Logger.VERBOSE)
   // Logger print s"Number of answers: ${answers.length} (${answers.lengthWithMultiplicity})"
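
The guard on config.contains('answers) disappears because RSAConfig now always supplies a default output path (see the RSAConfig.scala hunk below), so the answer dump is unconditional. For reference, a minimal standalone sketch of the os-lib/ujson call pattern used here, with made-up stand-ins for config('answers) and the answer objects; it is not part of this commit:

// Illustrative only: stand-ins for RSAComb's config('answers) and answers.map(_.toJSON).
import ujson.Obj

object AnswerDumpSketch extends App {
  val answers = Seq(Obj("query" -> 0, "count" -> 42))   // hypothetical answer records
  val target  = os.pwd / "out" / "answers.json"         // hypothetical output path
  // os.write fails if the file already exists; createFolders creates "out/" on demand.
  os.write(target, ujson.write(ujson.Arr(answers: _*), indent = 2), createFolders = true)
}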
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala
index 4d96850..f3039b3 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala
@@ -149,6 +149,8 @@ object RSAConfig {
       exit("The following flag is mandatory: '-o' or '--ontology'.")
     if (!config.contains('data))
       config += ('data -> List.empty[os.Path])
+    if (!config.contains('answers))
+      config += ('answers -> Logger.dir / "answers.json")
     config
   }
 }
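
The practical effect: when no '-a'/'--answers' flag is given, answers land next to the rest of the run's output under Logger.dir. A tiny self-contained sketch of the Symbol-keyed defaulting idiom, with a plain Map and a made-up directory standing in for the real config and Logger.dir; illustrative only:

object AnswersDefaultSketch extends App {
  val logDir = os.pwd / "rsacomb-log"                        // stand-in for Logger.dir
  var config = Map[Symbol, os.Path]('ontology -> os.pwd / "onto.owl")
  if (!config.contains('answers))
    config += ('answers -> logDir / "answers.json")          // default injected once, read later
  println(config('answers))                                  // .../rsacomb-log/answers.json
}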
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
index 4b8e015..5e864bb 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
@@ -623,7 +623,10 @@ class RSAOntology(
 
     /* Add filtering program */
     Logger print s"Filtering program rules: ${filter.rules.length}"
-    Logger.write(filter.rules.mkString("\n"), s"filter_query${query.id}.dlog")
+    Logger.write(
+      filter.rules.mkString("\n"),
+      s"filter_query_${query.id}.dlog"
+    )
     RDFoxUtil.addRules(data, filter.rules)
 
     // TODO: We remove the rules, should we drop the tuple table as well?
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
index b86230e..0fcde53 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
@@ -19,14 +19,12 @@ package uk.ac.ox.cs.rsacomb.util
 import java.util.Calendar
 import java.text.SimpleDateFormat
 import java.io.PrintStream
+import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery
 
-/** Rough implementation of a logger.
-  *
-  * This is a WIP class for debugging and benchmarking.
-  */
+/** Simple logger */
 object Logger {
 
-  private lazy val dir = {
+  lazy val dir = {
     val timestamp = (new SimpleDateFormat("yyyyMMddHHmmss")).format(
       Calendar.getInstance().getTime
     )
@@ -52,10 +50,21 @@ object Logger {
   /** Currend logger level */
   var level: Level = DEBUG
 
+  /** Print a line padded with logger level and timestamp.
+    *
+    * @param str object to be printed.
+    * @param lvl minimum logger level required to print.
+    */
   def print(str: Any, lvl: Level = NORMAL): Unit =
     if (lvl <= level)
       output println s"[$lvl][${Calendar.getInstance().getTime}] $str"
 
+  /** Write provided content to file.
+    *
+    * @param content content to append to the file.
+    * @param file name of the file to append the content to.
+    * @param lvl minimum logger level required to write.
+    */
   def write(content: => os.Source, file: String, lvl: Level = VERBOSE): Unit =
     if (lvl <= level)
       os.write.append(dir / file, content)
@@ -69,4 +78,64 @@ object Logger {
     result
   }
 
+  /** Generate simulation scripts for current run
+    *
+    * @param data data files to be imported.
+    * @param queries collection of executed queries.
+    * @param lvl minimum logger level required.
+    */
+  def generateSimulationScripts(
+      data: Seq[os.Path],
+      queries: Seq[ConjunctiveQuery],
+      lvl: Level = VERBOSE
+  ): Unit =
+    if (lvl <= level) {
+      /* Create script folder */
+      val sim = os.rel / 'sim
+      os.makeDir(dir / sim)
+      /* Generate main script */
+      os.write.append(
+        dir / "simulate.rdfox",
+        """
+echo "\n[Start endpoint]"
+endpoint start
+
+echo "\n[Create new datastore]"
+dstore create rsacomb
+active rsacomb
+prefix rsacomb: <http://www.cs.ox.ac.uk/isg/RSAComb#>
+tupletable create rsacomb:CanonicalModel type "named-graph"
+
+echo "\n[Import data]"
+""" ++
+          data
+            .map(d => s"""import > rsacomb:CanonicalModel \"$d\"""")
+            .mkString("\n")
+          ++ s"""
+import "axiomatisation.dlog"
+insert { graph rsacomb:CanonicalModel { ?x a rsacomb:Named } } where { graph rsacomb:CanonicalModel { ?x a owl:Thing } }
+
+echo "\\n[Load canonical model program]"
+import "canonical_model.dlog"
+
+exec "$sim/filter_query_$$(1).rdfox"
+"""
+      )
+      /* Generate query scripts */
+      queries.map(q => {
+        val id = q.id
+        os.write.append(
+          dir / sim / "filter_query_all.rdfox",
+          s"exec $sim/filter_query_$id.rdfox\n"
+        )
+        os.write.append(
+          dir / sim / s"filter_query_$id.rdfox",
+          s"""
+echo "\\n[Load filtering program for query $id]"
+tupletable create rsacomb:Filter$id type "named-graph"
+import "filter_query_$id.dlog"
+"""
+        )
+      })
+    }
 }
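
The generated layout under Logger.dir is simulate.rdfox at the top level plus sim/filter_query_all.rdfox and one sim/filter_query_<id>.rdfox per query, each importing the matching filter_query_<id>.dlog written by RSAOntology. Below is a standalone sketch of the same append-per-item pattern with os-lib, using a temporary directory and fake query ids in place of Logger.dir and ConjunctiveQuery; it is illustrative only, not project code:

object ScriptGenSketch extends App {
  val dir = os.temp.dir()                  // stand-in for Logger.dir
  val sim = os.rel / "sim"
  os.makeDir(dir / sim)
  val ids = Seq(0, 1, 2)                   // stand-in for queries.map(_.id)
  ids.foreach { id =>
    // one exec line per query appended to the index script...
    os.write.append(dir / sim / "filter_query_all.rdfox", s"exec $sim/filter_query_$id.rdfox\n")
    // ...and one small per-query script that loads its filtering program
    os.write.append(dir / sim / s"filter_query_$id.rdfox", s"""import "filter_query_$id.dlog"\n""")
  }
  os.list(dir / sim).foreach(println)
}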