-rwxr-xr-x  run_tests.bash                                              203
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/FilteringProgram.scala    18
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala                89
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala         16
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala          2
5 files changed, 263 insertions, 65 deletions
diff --git a/run_tests.bash b/run_tests.bash
new file mode 100755
index 0000000..17cbf01
--- /dev/null
+++ b/run_tests.bash
@@ -0,0 +1,203 @@
#!/usr/bin/env bash

NC='\033[0m'
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'

msg_info() {
    echo -e "${GREEN}$1${NC}"
}

msg_warn() {
    echo -e "${YELLOW}$1${NC}"
}

msg_error() {
    echo -e "${RED}$1${NC}"
}

print_help() {
    echo
    echo "testRSA - a quick script to run tests all night long"
    echo
    echo "USAGE:"
    echo "  testRSA OPTION [...]"
    echo
    echo "OPTIONs are:"
    echo "  -o | --ontology <path>:"
    echo "      path to ontology."
    echo "  -d | --data <path>:"
    echo "      path to a folder containing data for the ontology."
    echo "  -q | --queries <path>:"
    echo "      path to a folder containing SPARQL query files to be"
    echo "      executed against the ontology and data."
    echo "  -p | --prefix <path>:"
    echo "      provides a folder to prefix to the output files."
    echo "      Defaults to './results'."
    echo "  -h | -? | --help:"
    echo "      print this help"
    echo
}
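
# Example invocation (paths below are illustrative only):
#
#   ./run_tests.bash -o ./ontology.owl -d ./data -q ./queries -p ./results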

ONTOLOGY=""
DATA=""
QUERIES=""
PREFIX="./results"

while [[ $# -gt 0 ]]
do
    case $1 in
        -o|--ontology)
            shift
            ONTOLOGY="$1"
            [ ! -r "$ONTOLOGY" ] && \
                msg_error "Unable to read '$ONTOLOGY'" && \
                print_help && \
                exit 2
            ;;
        -d|--data)
            shift
            DATA="$1"
            [ ! -d "$DATA" ] && \
                msg_error "'$DATA' is not a directory" && \
                print_help && \
                exit 2
            ;;
        -q|--queries)
            shift
            QUERIES="$1"
            [ ! -d "$QUERIES" ] && \
                msg_error "'$QUERIES' is not a directory" && \
                print_help && \
                exit 2
            ;;
        -p|--prefix)
            shift
            PREFIX="$1"
            ;;
        -h|-\?|--help)
            print_help
            exit 0
            ;;
        *)
            msg_error "$1: invalid option"
            print_help
            exit 1
            ;;
    esac
    shift
done

[ -z "$ONTOLOGY" ] && \
    msg_error "Use -o | --ontology to provide an ontology file" && \
    print_help && \
    exit 3

[ -z "$DATA" ] && \
    msg_error "Use -d | --data to provide a data folder" && \
    print_help && \
    exit 3

[ -z "$QUERIES" ] && \
    msg_error "Use -q | --queries to provide a query folder" && \
    print_help && \
    exit 3


DATAS=$(ls "$DATA"/*)
mkdir -p "$PREFIX"
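
# Run the combined approach once per query: sbt receives the query file, the
# ontology and the full list of data files as arguments, and the complete log
# is saved so it can be scraped for statistics below.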
for QUERY in "$QUERIES"/*.sparql
do
    sbt "run $QUERY $ONTOLOGY $DATAS" 2>&1 | tee "$PREFIX/answers_$(basename "$QUERY" .sparql).txt"
done

OUTPUT="$PREFIX/results.csv"
echo "NAME, TBOX, RBOX, ABOX, \
      CANONICAL MODEL GENERATION, \
      CANONICAL MODEL RULES, CANONICAL MODEL RULES LOADING, \
      CANONICAL MODEL FACTS, CANONICAL MODEL FACTS LOADING, \
      CANONICAL MODEL IDB, CANONICAL MODEL EDB, \
      FILTERING PROGRAM GENERATION, \
      FILTERING PROGRAM RULES, FILTERING PROGRAM RULES LOADING, \
      FILTERING PROGRAM FACTS, FILTERING PROGRAM FACTS LOADING, \
      FILTERING PROGRAM IDB, FILTERING PROGRAM EDB, \
      ANSWERING TIME, #ANSWERS, #UNFILTERED, #SPURIOUS, %SPURIOUS" > "$OUTPUT"
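
# Each CSV row is produced by scraping one saved log with the awk program
# below; values that do not appear in a log are simply left empty.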

for RESULT in "$PREFIX"/*.txt
do
    awk -v filename="$RESULT" '
        BEGIN {
            OFS = ", "
            name = filename
            sub("^.*answers_", "", name)
            sub(".txt$", "", name)
        }
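        # Each pattern below matches one line of the RSAComb log. The
        # "Loading rules/facts" and IDB/EDB messages are printed for both the
        # canonical model and the filtering program, so the canon flag records
        # which of the two is currently being loaded.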
        /Original TBox/ { tbox_size = $NF }
        /Original RBox/ { rbox_size = $NF }
        /Original ABox/ { abox_size = $NF }
        /Generating canonical model program \(END\)/ { canon_gen_time = $NF }
        /Generating filtering program \(END\)/ { filter_gen_time = $NF }
        /Canonical model rules/ {
            canon_rules = $NF
            canon = 1
        }
        /Canonical model facts/ {
            canon_facts = $NF
            canon = 1
        }
        /Filtering program rules/ {
            filter_rules = $NF
        }
        /Filtering program facts/ {
            filter_facts = $NF
        }
        /Loading rules \(END\)/ {
            if (canon) {
                canon_rules_load = $NF
            } else {
                filter_rules_load = $NF
            }
        }
        /Loading facts/ {
            if (canon) {
                canon_facts_load = $NF
            } else {
                filter_facts_load = $NF
            }
        }
        /Aggregate number of IDB facts/ {
            sub("^.*=", "")
            sub(",$", "")
            if (canon) {
                canon_idb = $0
            } else {
                filter_idb = $0
            }
        }
        /Aggregate number of EDB facts/ {
            sub("^.*=", "")
            sub(",$", "")
            if (canon) {
                canon_edb = $0
                canon = 0
            } else {
                filter_edb = $0
            }
        }
        /Answers computation \(END\)/ { answers_time = $NF }
        /Number of answers/ { answers = $(NF-1) }
        /Number of unfiltered answers/ { unfiltered = $(NF-1) }
        /Number of spurious answers/ { spurious = $(NF-1) }
        /Percentage of spurious answers/ { spurious_perc = $NF }
        END { print name, tbox_size, rbox_size, abox_size, \
                canon_gen_time, canon_rules, canon_rules_load, canon_facts, canon_facts_load, \
                canon_idb, canon_edb, \
                filter_gen_time, filter_rules, filter_rules_load, filter_facts, filter_facts_load, \
                filter_idb, filter_edb, \
                answers_time, answers, unfiltered, spurious, spurious_perc
        }
    ' "$RESULT" >> "$OUTPUT"
done

exit 0
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/FilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/FilteringProgram.scala
index 4e533c6..9427735 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/FilteringProgram.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/FilteringProgram.scala
@@ -126,9 +126,9 @@ class FilteringProgram(query: ConjunctiveQuery, constants: List[Term]) {
     if index2 >= 0
   } yield Rule.create(
     RSA.FK,
+    RSA.ID(RSA(index1), RSA(index2)),
     role1 << Forward,
     role2 << Forward,
-    RSA.ID(RSA(index1), RSA(index2)),
     not(RSA.Congruent(role1.getArguments get 0, role2.getArguments get 0))
   )
   val r4b = for {
@@ -140,9 +140,9 @@ class FilteringProgram(query: ConjunctiveQuery, constants: List[Term]) {
     if index2 >= 0
   } yield Rule.create(
     RSA.FK,
+    RSA.ID(RSA(index1), RSA(index2)),
     role1 << Forward,
     role2 << Backward,
-    RSA.ID(RSA(index1), RSA(index2)),
     not(RSA.Congruent(role1.getArguments get 0, role2.getArguments get 2))
   )
   val r4c = for {
@@ -154,9 +154,9 @@ class FilteringProgram(query: ConjunctiveQuery, constants: List[Term]) {
     if index2 >= 0
   } yield Rule.create(
     RSA.FK,
+    RSA.ID(RSA(index1), RSA(index2)),
     role1 << Backward,
     role2 << Backward,
-    RSA.ID(RSA(index1), RSA(index2)),
     not(RSA.Congruent(role1.getArguments get 2, role2.getArguments get 2))
   )
 
@@ -180,13 +180,13 @@ class FilteringProgram(query: ConjunctiveQuery, constants: List[Term]) {
       RSA(query.bounded indexOf r1arg0),
       RSA(query.bounded indexOf r2arg0)
     ),
-    role1 << Forward,
-    role2 << Forward,
     RSA.ID(
       RSA(query.bounded indexOf r1arg2),
       RSA(query.bounded indexOf r2arg2)
     ),
     RSA.Congruent(r1arg0, r2arg0),
+    role1 << Forward,
+    role2 << Forward,
     not(RSA.NI(r1arg0))
   )
   val r5b = for {
@@ -205,13 +205,13 @@ class FilteringProgram(query: ConjunctiveQuery, constants: List[Term]) {
       RSA(query.bounded indexOf r1arg0),
       RSA(query.bounded indexOf r2arg2)
     ),
-    role1 << Forward,
-    role2 << Backward,
     RSA.ID(
       RSA(query.bounded indexOf r1arg2),
       RSA(query.bounded indexOf r2arg0)
     ),
     RSA.Congruent(r1arg0, r2arg2),
+    role1 << Forward,
+    role2 << Backward,
     not(RSA.NI(r1arg0))
   )
   val r5c = for {
@@ -230,13 +230,13 @@ class FilteringProgram(query: ConjunctiveQuery, constants: List[Term]) {
       RSA(query.bounded indexOf r1arg2),
       RSA(query.bounded indexOf r2arg2)
     ),
-    role1 << Backward,
-    role2 << Backward,
     RSA.ID(
       RSA(query.bounded indexOf r1arg0),
       RSA(query.bounded indexOf r2arg0)
     ),
     RSA.Congruent(r1arg2, r2arg2),
+    role1 << Backward,
+    role2 << Backward,
     not(RSA.NI(r1arg2))
   )
 
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
index f54884f..bf96a31 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
@@ -1,37 +1,41 @@
 package uk.ac.ox.cs.rsacomb
 
-/* Java imports */
 import java.io.File
 import java.util.HashMap
 import scala.collection.JavaConverters._
-
 import tech.oxfordsemantic.jrdfox.client.UpdateType
-import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery
 import tech.oxfordsemantic.jrdfox.logic.expression.{IRI, Term}
+import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery
 
-/* Local imports */
 import util.{Logger, RDFoxUtil, RSA}
 import sparql.ConjunctiveQuery
 
+/** Entry point of the program.
+  *
+  * The executable expects a SPARQL query and a non-empty sequence of
+  * ontology files as arguments. The query file is expected to contain
+  * exactly one query, while the ontology files will be programmatically
+  * merged in a single ontology.
+  *
+  * @todo better arguments handling is needed. Look into some library
+  *       for this.
+  * @todo at the moment the input ontology is assumed to be Horn-ALCHOIQ.
+  *       This might not be the case.
+  */
 object RSAComb extends App {
 
   val help: String = """
   rsacomb - combined approach for CQ answering for RSA ontologies.
 
   USAGE
-    rsacomb <query> <ontology> ...
+    rsacomb <query> <ontology> [...]
 
   where
-    - query: a (single) SPARQL query file.
-    - ontology: one or more ontologies.
+    - query: path to a file containing a single SPARQL query
+    - ontology: one or more ontology files
 
   """
 
-  /* Simple arguments handling
-   *
-   * TODO: use something better later on
-   */
-
   if (args.length < 2) {
     println(help)
     sys.exit;
@@ -46,52 +50,41 @@ object RSAComb extends App {
     sys.exit;
   }
 
-  /* TODO: It might be required to check if the ontology in input is
-   * Horn-ALCHOIQ. At the moment we are assuming this is always the
-   * case.
-   */
-
   val ontology = RSAOntology(ontoPaths: _*)
   if (ontology.isRSA) {
 
     Logger print "Ontology is RSA!"
 
-    /** Read SPARQL query from file */
-    val strQuery = RDFoxUtil.loadQueryFromFile(queryPath.getAbsoluteFile)
-    val query = ConjunctiveQuery parse strQuery
+    val query = RDFoxUtil.loadQueryFromFile(queryPath.getAbsoluteFile)
 
-    query match {
+    ConjunctiveQuery.parse(query) match {
       case Some(query) => {
         val answers = ontology ask query
         Logger.print(s"$answers", Logger.QUIET)
-        Logger print s"Number of answer: ${answers.length} (${answers.lengthWithMultiplicity})"
+        Logger print s"Number of answers: ${answers.length} (${answers.lengthWithMultiplicity})"
 
-        val unfiltered = ontology askUnfiltered query
-        unfiltered map { u =>
-          Logger.print(
-            s"Number of unfiltered answers: ${u.length} (${u.map(_._1).sum}).",
-            Logger.DEBUG
-          )
-          //u foreach println
-          val spurious = {
-            val sp =
-              RDFoxUtil.buildDescriptionQuery("SP", query.variables.length)
-            ontology.queryDataStore(query, sp, RSA.Prefixes)
-          }
-          spurious map { s =>
-            Logger.print(
-              s"Number of spurious answers: ${s.length} (${s.map(_._1).sum})",
-              Logger.DEBUG
-            )
-            //s foreach println
-            val perc =
-              if (u.length > 0) (s.length / u.length.toFloat) * 100 else 0
-            Logger.print(
-              s"Percentage of spurious answers: $perc%",
-              Logger.DEBUG
-            )
+        /* Additional DEBUG information */
+        if (Logger.level >= Logger.DEBUG) {
+          /* Unfiltered rules */
+          val unfiltered = ontology askUnfiltered query
+          unfiltered map { u =>
+            Logger print s"Number of unfiltered answers: ${u.length} (${u.map(_._1).sum})."
+
+            /* Spurious answers */
+            val spurious = {
+              val variables = query.variables.length
+              val sp = RDFoxUtil.buildDescriptionQuery("SP", variables)
+              ontology.queryDataStore(query, sp, RSA.Prefixes)
+            }
+            spurious map { s =>
+              Logger print s"Number of spurious answers: ${s.length} (${s.map(_._1).sum})"
+
+              /* Spurious/unfiltered percentage */
+              val perc =
+                if (u.length > 0) (s.length / u.length.toFloat) * 100 else 0
+              Logger print s"Percentage of spurious answers: $perc%"
+            }
           }
-
         }
       }
       case None =>
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
index 4ac5a77..8d5bf4c 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
@@ -114,9 +114,9 @@ class RSAOntology(val ontology: OWLOntology) {
 
   val axioms: List[OWLLogicalAxiom] = abox ::: tbox ::: rbox
 
-  Logger.print(s"Original TBox: ${tbox.length} axioms", Logger.DEBUG)
-  Logger.print(s"Original RBox: ${rbox.length} axioms", Logger.DEBUG)
-  Logger.print(s"Original ABox: ${abox.length} axioms", Logger.DEBUG)
+  Logger.print(s"Original TBox: ${tbox.length}", Logger.DEBUG)
+  Logger.print(s"Original RBox: ${rbox.length}", Logger.DEBUG)
+  Logger.print(s"Original ABox: ${abox.length}", Logger.DEBUG)
 
   /* Retrieve individuals in the original ontology
    */
@@ -358,16 +358,18 @@ class RSAOntology(val ontology: OWLOntology) {
 
     //data.beginTransaction(TransactionType.READ_WRITE)
 
-    Logger print s"Canonical model: ${canon.rules.length} rules"
+    Logger print s"Canonical model rules: ${canon.rules.length}"
     RDFoxUtil.addRules(data, this.canonicalModel.rules)
 
-    Logger print s"Canonical model: ${canon.facts.length} facts"
+    Logger print s"Canonical model facts: ${canon.facts.length}"
     RDFoxUtil.addFacts(data, this.canonicalModel.facts)
 
-    Logger print s"Filtering program: ${filter.facts.length} facts"
+    RDFoxUtil printStatisticsFor data
+
+    Logger print s"Filtering program facts: ${filter.facts.length}"
     RDFoxUtil.addFacts(data, filter.facts)
 
-    Logger print s"Filtering program: ${filter.rules.length} rules"
+    Logger print s"Filtering program rules: ${filter.rules.length}"
     RDFoxUtil.addRules(data, filter.rules)
 
     //data.commitTransaction()
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
index 74797a2..56e9de0 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
@@ -38,7 +38,7 @@ object Logger {
     print(s"$desc (START)", lvl)
     val result = expr
     val t1 = System.currentTimeMillis()
-    print(s"$desc (END): ${(t1 - t0).toFloat / 1000}s", lvl)
+    print(s"$desc (END): ${(t1 - t0).toFloat / 1000}", lvl)
     result
   }
 