aboutsummaryrefslogtreecommitdiff
path: root/src/main/scala
diff options
context:
space:
mode:
Diffstat (limited to 'src/main/scala')
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala49
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala165
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala154
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala291
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala38
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala178
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala334
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala174
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala30
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala302
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala217
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RDFox.scala4
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala96
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala55
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/package.scala21
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala79
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala19
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/suffix/RSASuffix.scala24
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/util/DataFactory.scala29
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala19
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala160
-rw-r--r--src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala89
22 files changed, 1586 insertions, 941 deletions
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala
index ca54054..bd3d3c3 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala
@@ -31,7 +31,8 @@ import tech.oxfordsemantic.jrdfox.logic.datalog.{
31 BodyFormula, 31 BodyFormula,
32 Negation, 32 Negation,
33 Rule, 33 Rule,
34 TupleTableAtom 34 TupleTableAtom,
35 TupleTableName
35} 36}
36import tech.oxfordsemantic.jrdfox.logic.expression.{IRI, Term, Variable} 37import tech.oxfordsemantic.jrdfox.logic.expression.{IRI, Term, Variable}
37 38
@@ -39,7 +40,7 @@ import implicits.JavaCollections._
39 40
40import uk.ac.ox.cs.rsacomb.converter._ 41import uk.ac.ox.cs.rsacomb.converter._
41import uk.ac.ox.cs.rsacomb.suffix._ 42import uk.ac.ox.cs.rsacomb.suffix._
42import uk.ac.ox.cs.rsacomb.util.RSA 43import uk.ac.ox.cs.rsacomb.util.{DataFactory, RSA}
43 44
44/** Canonical model generator 45/** Canonical model generator
45 * 46 *
@@ -48,8 +49,9 @@ import uk.ac.ox.cs.rsacomb.util.RSA
48 * (via materialization). 49 * (via materialization).
49 * 50 *
50 * @param ontology the RSA ontology the canonical model is targeting. 51 * @param ontology the RSA ontology the canonical model is targeting.
52 * @param graph the graph the canonical model will be generated into.
51 */ 53 */
52class CanonicalModel(val ontology: RSAOntology) { 54class CanonicalModel(val ontology: RSAOntology, val graph: IRI) {
53 55
54 /** Simplify conversion between OWLAPI and RDFox concepts */ 56 /** Simplify conversion between OWLAPI and RDFox concepts */
55 import implicits.RDFox._ 57 import implicits.RDFox._
@@ -65,7 +67,8 @@ class CanonicalModel(val ontology: RSAOntology) {
65 * versions need to be explicitly stated in terms of logic rules. 67 * versions need to be explicitly stated in terms of logic rules.
66 */ 68 */
67 val rolesAdditionalRules: List[Rule] = { 69 val rolesAdditionalRules: List[Rule] = {
68 ontology.roles 70 val tt = TupleTableName.create(graph.getIRI)
71 ontology.objroles
69 .collect { case prop: OWLObjectProperty => prop } 72 .collect { case prop: OWLObjectProperty => prop }
70 .flatMap((pred) => { 73 .flatMap((pred) => {
71 val iri = pred.getIRI.getIRIString 74 val iri = pred.getIRI.getIRIString
@@ -83,8 +86,8 @@ class CanonicalModel(val ontology: RSAOntology) {
83 ) 86 )
84 ) 87 )
85 yield Rule.create( 88 yield Rule.create(
86 TupleTableAtom.rdf(varX, iri :: hSuffix, varY), 89 TupleTableAtom.create(tt, varX, iri :: hSuffix, varY),
87 TupleTableAtom.rdf(varX, iri :: bSuffix, varY) 90 TupleTableAtom.create(tt, varX, iri :: bSuffix, varY)
88 ) 91 )
89 }) 92 })
90 } 93 }
@@ -92,7 +95,7 @@ class CanonicalModel(val ontology: RSAOntology) {
92 val (facts, rules): (List[TupleTableAtom], List[Rule]) = { 95 val (facts, rules): (List[TupleTableAtom], List[Rule]) = {
93 // Compute rules from ontology axioms 96 // Compute rules from ontology axioms
94 val (facts, rules) = { 97 val (facts, rules) = {
95 val term = RSAUtil.genFreshVariable() 98 val term = Variable.create("X")
96 val unsafe = ontology.unsafe 99 val unsafe = ontology.unsafe
97 ontology.axioms 100 ontology.axioms
98 .map(a => 101 .map(a =>
@@ -108,6 +111,8 @@ class CanonicalModel(val ontology: RSAOntology) {
108 111
109 object CanonicalModelConverter extends RDFoxConverter { 112 object CanonicalModelConverter extends RDFoxConverter {
110 113
114 override val graph = TupleTableName.create(CanonicalModel.this.graph.getIRI)
115
111 private def rules1( 116 private def rules1(
112 axiom: OWLSubClassOfAxiom 117 axiom: OWLSubClassOfAxiom
113 ): Result = { 118 ): Result = {
@@ -115,11 +120,10 @@ class CanonicalModel(val ontology: RSAOntology) {
115 // Fresh Variables 120 // Fresh Variables
116 val v0 = RSA("v0_" ++ axiom.hashed) 121 val v0 = RSA("v0_" ++ axiom.hashed)
117 val varX = Variable.create("X") 122 val varX = Variable.create("X")
118 implicit val unfoldTerm = RSA(unfold.hashCode.toString)
119 // TODO: use axiom.toTriple instead 123 // TODO: use axiom.toTriple instead
120 val atomA: TupleTableAtom = { 124 val atomA: TupleTableAtom = {
121 val cls = axiom.getSubClass.asInstanceOf[OWLClass].getIRI 125 val cls = axiom.getSubClass.asInstanceOf[OWLClass].getIRI
122 TupleTableAtom.rdf(varX, IRI.RDF_TYPE, cls) 126 TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, cls)
123 } 127 }
124 val roleRf: TupleTableAtom = { 128 val roleRf: TupleTableAtom = {
125 val prop = 129 val prop =
@@ -132,12 +136,15 @@ class CanonicalModel(val ontology: RSAOntology) {
132 .getFiller 136 .getFiller
133 .asInstanceOf[OWLClass] 137 .asInstanceOf[OWLClass]
134 .getIRI 138 .getIRI
135 TupleTableAtom.rdf(v0, IRI.RDF_TYPE, cls) 139 TupleTableAtom.create(graph, v0, IRI.RDF_TYPE, cls)
136 } 140 }
137 val facts = unfold map RSA.In 141 val unfoldSet = RSA(unfold.hashCode.toString)
142 val facts = unfold.map(TupleTableAtom.create(graph, _, RSA.IN, unfoldSet))
143 val notInX =
144 Negation.create(TupleTableAtom.create(graph, varX, RSA.IN, unfoldSet))
138 val rules = List( 145 val rules = List(
139 Rule.create(roleRf, atomA, RSA.NotIn(varX)), 146 Rule.create(roleRf, atomA, notInX),
140 Rule.create(atomB, atomA, RSA.NotIn(varX)) 147 Rule.create(atomB, atomA, notInX)
141 ) 148 )
142 (facts, rules) 149 (facts, rules)
143 } 150 }
@@ -155,7 +162,7 @@ class CanonicalModel(val ontology: RSAOntology) {
155 // Predicates 162 // Predicates
156 def atomA(t: Term): TupleTableAtom = { 163 def atomA(t: Term): TupleTableAtom = {
157 val cls = axiom.getSubClass.asInstanceOf[OWLClass].getIRI 164 val cls = axiom.getSubClass.asInstanceOf[OWLClass].getIRI
158 TupleTableAtom.rdf(t, IRI.RDF_TYPE, cls) 165 TupleTableAtom.create(graph, t, IRI.RDF_TYPE, cls)
159 } 166 }
160 def roleRf(t1: Term, t2: Term): TupleTableAtom = 167 def roleRf(t1: Term, t2: Term): TupleTableAtom =
161 super.convert(roleR, t1, t2, Forward) 168 super.convert(roleR, t1, t2, Forward)
@@ -165,7 +172,7 @@ class CanonicalModel(val ontology: RSAOntology) {
165 .getFiller 172 .getFiller
166 .asInstanceOf[OWLClass] 173 .asInstanceOf[OWLClass]
167 .getIRI 174 .getIRI
168 TupleTableAtom.rdf(t, IRI.RDF_TYPE, cls) 175 TupleTableAtom.create(graph, t, IRI.RDF_TYPE, cls)
169 } 176 }
170 //Rules 177 //Rules
171 List( 178 List(
@@ -190,7 +197,7 @@ class CanonicalModel(val ontology: RSAOntology) {
190 // Predicates 197 // Predicates
191 def atomA(t: Term): TupleTableAtom = { 198 def atomA(t: Term): TupleTableAtom = {
192 val cls = axiom.getSubClass.asInstanceOf[OWLClass].getIRI 199 val cls = axiom.getSubClass.asInstanceOf[OWLClass].getIRI
193 TupleTableAtom.rdf(t, IRI.RDF_TYPE, cls) 200 TupleTableAtom.create(graph, t, IRI.RDF_TYPE, cls)
194 } 201 }
195 def roleRf(t: Term): TupleTableAtom = 202 def roleRf(t: Term): TupleTableAtom =
196 super.convert(roleR, t, v1, Forward) 203 super.convert(roleR, t, v1, Forward)
@@ -200,7 +207,7 @@ class CanonicalModel(val ontology: RSAOntology) {
200 .getFiller 207 .getFiller
201 .asInstanceOf[OWLClass] 208 .asInstanceOf[OWLClass]
202 .getIRI 209 .getIRI
203 TupleTableAtom.rdf(v1, IRI.RDF_TYPE, cls) 210 TupleTableAtom.create(graph, v1, IRI.RDF_TYPE, cls)
204 } 211 }
205 cycle.flatMap { x => 212 cycle.flatMap { x =>
206 List( 213 List(
@@ -216,13 +223,13 @@ class CanonicalModel(val ontology: RSAOntology) {
216 unsafe: List[OWLObjectPropertyExpression], 223 unsafe: List[OWLObjectPropertyExpression],
217 skolem: SkolemStrategy, 224 skolem: SkolemStrategy,
218 suffix: RSASuffix 225 suffix: RSASuffix
219 ): Result = 226 )(implicit fresh: DataFactory): Result =
220 axiom match { 227 axiom match {
221 228
222 case a: OWLSubClassOfAxiom if a.isT5 => { 229 case a: OWLSubClassOfAxiom if a.isT5 => {
223 val role = axiom.objectPropertyExpressionsInSignature(0) 230 val role = axiom.objectPropertyExpressionsInSignature(0)
224 if (unsafe contains role) 231 if (unsafe contains role)
225 super.convert(a, term, unsafe, new Standard(a), Forward) 232 super.convert(a, term, unsafe, new Standard(a), Forward)(fresh)
226 else { 233 else {
227 val (f1, r1) = rules1(a) 234 val (f1, r1) = rules1(a)
228 (f1, r1 ::: rules2(a) ::: rules3(a)) 235 (f1, r1 ::: rules2(a) ::: rules3(a))
@@ -231,12 +238,12 @@ class CanonicalModel(val ontology: RSAOntology) {
231 238
232 case a: OWLSubObjectPropertyOfAxiom => { 239 case a: OWLSubObjectPropertyOfAxiom => {
233 val (facts, rules) = List(Empty, Forward, Backward) 240 val (facts, rules) = List(Empty, Forward, Backward)
234 .map(super.convert(a, term, unsafe, NoSkolem, _)) 241 .map(super.convert(a, term, unsafe, NoSkolem, _)(fresh))
235 .unzip 242 .unzip
236 (facts.flatten, rules.flatten) 243 (facts.flatten, rules.flatten)
237 } 244 }
238 245
239 case a => super.convert(a, term, unsafe, skolem, suffix) 246 case a => super.convert(a, term, unsafe, skolem, suffix)(fresh)
240 247
241 } 248 }
242 } 249 }
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
index 258c226..121c65f 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
@@ -16,7 +16,8 @@
16 16
17package uk.ac.ox.cs.rsacomb 17package uk.ac.ox.cs.rsacomb
18 18
19import java.io.File 19import java.io.{File, PrintWriter}
20import java.nio.file.{Path, Paths, InvalidPathException}
20import java.util.HashMap 21import java.util.HashMap
21import scala.collection.JavaConverters._ 22import scala.collection.JavaConverters._
22import tech.oxfordsemantic.jrdfox.client.UpdateType 23import tech.oxfordsemantic.jrdfox.client.UpdateType
@@ -28,97 +29,7 @@ import sparql.ConjunctiveQuery
28 29
29import uk.ac.ox.cs.rsacomb.ontology.Ontology 30import uk.ac.ox.cs.rsacomb.ontology.Ontology
30import uk.ac.ox.cs.rsacomb.converter.Normalizer 31import uk.ac.ox.cs.rsacomb.converter.Normalizer
31import uk.ac.ox.cs.rsacomb.approximation.LowerBound 32import uk.ac.ox.cs.rsacomb.approximation.{Upperbound, Lowerbound}
32
33case class RSAOption[+T](opt: T) {
34 def get[T]: T = opt.asInstanceOf[T]
35}
36
37object RSAConfig {
38 type Config = Map[Symbol, RSAOption[Any]]
39
40 private implicit def toRSAOption[T](opt: T) = RSAOption[T](opt)
41
42 /** Help message */
43 private val help: String = """
44 rsacomb - combined approach for CQ answering for RSA ontologies.
45
46 USAGE
47 rsacomb [OPTIONS] <ontology> [<data> ...]
48
49 -h | -? | --help
50 print this help message
51
52 -q <file> | --query <file>
53 path to a file containing a single SPARQL query. If no query
54 is provided, only the approximation to RSA will be performed.
55
56 <ontology>
57 file containing the ontology
58
59 <data>
60 one or more data files
61
62 """
63
64 /** Default config values */
65 private val default: Config = Map.empty
66
67 /** Utility to exit the program with a custom message on stderr.
68 *
69 * The program will exit with error code 1 after printing the help
70 * message.
71 *
72 * @param msg message printed to stderr.
73 */
74 private def exit(msg: String): Nothing = {
75 System.err.println(msg)
76 System.err.println()
77 System.err.println(help)
78 sys.exit(1)
79 }
80
81 /** Parse arguments with default options
82 *
83 * @param args arguments list
84 * @return map of config options
85 */
86 def parse(args: List[String]): Config = parse(args, default)
87
88 /** Parse arguments
89 *
90 * @param args arguments list
91 * @param config default configuration
92 * @return map of config options
93 */
94 def parse(args: List[String], config: Config): Config = {
95 args match {
96 case flag @ ("-h" | "-?" | "--help") :: _ => {
97 println(help)
98 sys.exit(0)
99 }
100 case flag @ ("-q" | "--query") :: _query :: tail => {
101 val query = new File(_query)
102 if (!query.isFile)
103 exit(s"'$query' is not a valid filename.")
104 parse(tail, config ++ Map('query -> query))
105 }
106 case _ontology :: _data => {
107 val ontology = new File(_ontology)
108 val data = _data.map(new File(_))
109 (ontology :: data) foreach { (file) =>
110 if (!file.isFile)
111 exit(s"'$file' is not a valid filename.")
112 }
113 finalise(config ++ Map('ontology -> ontology, 'data -> data))
114 }
115 case a => exit(s"Invalid sequence of arguments '${a.mkString(" ")}'.")
116 }
117 }
118
119 /** Perform final checks on parsed options */
120 private def finalise(config: Config): Config = config
121}
122 33
123/** Main entry point to the program */ 34/** Main entry point to the program */
124object RSAComb extends App { 35object RSAComb extends App {
@@ -126,42 +37,52 @@ object RSAComb extends App {
126 /* Command-line options */ 37 /* Command-line options */
127 val config = RSAConfig.parse(args.toList) 38 val config = RSAConfig.parse(args.toList)
128 39
40 /* Set logger level */
41 if (config.contains('logger))
42 Logger.level = config('logger).get[Logger.Level]
43
129 /* Load original ontology and normalize it */ 44 /* Load original ontology and normalize it */
130 val ontology = Ontology( 45 val ontology = Ontology(
131 config('ontology).get[File], 46 config('ontology).get[os.Path],
132 config('data).get[List[File]] 47 config('data).get[List[os.Path]]
133 ).normalize(new Normalizer) 48 ).normalize(new Normalizer)
134 49
50 //ontology.axioms foreach println
51
135 /* Approximate the ontology to RSA */ 52 /* Approximate the ontology to RSA */
136 val toRSA = new LowerBound 53 val toRSA = new Upperbound
137 val rsa = ontology approximate toRSA 54 val rsa = ontology approximate toRSA
138 55
139 if (config contains 'query) { 56 if (config contains 'queries) {
140 val query = 57 val queries =
141 RDFoxUtil.loadQueryFromFile(config('query).get[File].getAbsoluteFile) 58 RDFoxUtil.loadQueriesFromFile(
142 59 config('queries).get[os.Path]
143 ConjunctiveQuery.parse(query) match { 60 )
144 case Some(query) => { 61
145 // Retrieve answers 62 val answers = rsa ask queries
146 val answers = rsa ask query 63
147 Logger.print(s"$answers", Logger.VERBOSE) 64 /* Write answers to output file */
148 Logger print s"Number of answers: ${answers.length} (${answers.lengthWithMultiplicity})" 65 if (config.contains('answers))
149 // Retrieve unfiltered answers 66 os.write(
150 // val unfiltered = rsa.queryDataStore( 67 config('answers).get[os.Path],
151 // """ 68 ujson.write(ujson.Arr(answers.map(_.toJSON)), indent = 4),
152 // SELECT (count(?K) as ?COUNT) 69 createFolders = true
153 // WHERE { 70 )
154 // ?K a rsa:QM . 71
155 // } 72 // Logger.print(s"$answers", Logger.VERBOSE)
156 // """, 73 // Logger print s"Number of answers: ${answers.length} (${answers.lengthWithMultiplicity})"
157 // RSA.Prefixes 74 // Retrieve unfiltered answers
158 // ) 75 // val unfiltered = rsa.queryDataStore(
159 // unfiltered.foreach((u) => 76 // """
160 // Logger print s"Number of unfiltered answers: ${u.head._2}" 77 // SELECT (count(?K) as ?COUNT)
161 // ) 78 // WHERE {
162 } 79 // ?K a rsa:QM .
163 case None => 80 // }
164 throw new RuntimeException("Submitted query is not conjunctive") 81 // """,
165 } 82 // RSA.Prefixes
83 // )
84 // unfiltered.foreach((u) =>
85 // Logger print s"Number of unfiltered answers: ${u.head._2}"
86 // )
166 } 87 }
167} 88}
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala
new file mode 100644
index 0000000..4d96850
--- /dev/null
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala
@@ -0,0 +1,154 @@
1/*
2 * Copyright 2020, 2021 KRR Oxford
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package uk.ac.ox.cs.rsacomb
18
19import scala.collection.mutable.Map
20import util.Logger
21
22case class RSAOption[+T](opt: T) {
23 def get[T]: T = opt.asInstanceOf[T]
24}
25
26object RSAConfig {
27 type Config = Map[Symbol, RSAOption[Any]]
28
29 private implicit def toRSAOption[T](opt: T) = RSAOption[T](opt)
30
31 /** Help message */
32 private val help: String = """
33 rsacomb - combined approach for CQ answering for RSA ontologies.
34
35 USAGE
36 rsacomb [OPTIONS] <ontology> [<data> ...]
37
38 -h | -? | --help
39 print this help message
40
41 -l | --logger <level>
42 specify the logger verbosity. Values are: quiet, normal (default),
43 debug, verbose.
44
45 -a | --answers <file>
46 path to the output file for the answers to the query (in JSON
47 format)
48
49 -q | --queries <file>
50 path to a file containing a single SPARQL query. If no query
51 is provided, only the approximation to RSA will be performed.
52
53 -o | --ontology <file>
54 ontology file in OWL format.
55
56 -d | --data <file>
57 data files to be used alongside the ontology file. If a
58 directory is provided, all files in the directory (recursively)
59 will be considered.
60
61 """
62
63 /** Default config values */
64 private val default: Config = Map.empty
65
66 /** Utility to exit the program with a custom message on stderr.
67 *
68 * The program will exit with error code 1 after printing the help
69 * message.
70 *
71 * @param msg message printed to stderr.
72 */
73 private def exit(msg: String): Nothing = {
74 System.err.println(msg)
75 System.err.println()
76 System.err.println(help)
77 sys.exit(1)
78 }
79
80 private def getPath(str: String): os.Path =
81 try {
82 os.Path(str, base = os.pwd)
83 } catch {
84 case e: IllegalArgumentException =>
85 exit(s"'$str' is not a well formed path.")
86 }
87
88 /** Parse arguments with default options
89 *
90 * @param args arguments list
91 * @return map of config options
92 */
93 def parse(args: List[String]): Config = parse(args, default)
94
95 /** Parse arguments
96 *
97 * @param args arguments list
98 * @param config default configuration
99 * @return map of config options
100 */
101 def parse(args: List[String], config: Config): Config = {
102 args match {
103 case Nil => finalise(config)
104 case flag @ ("-h" | "-?" | "--help") :: _ => {
105 println(help)
106 sys.exit(0)
107 }
108 case flag @ ("-l" | "--logger") :: _level :: tail => {
109 val level = _level match {
110 case "quiet" => Logger.QUIET
111 case "debug" => Logger.DEBUG
112 case "verbose" => Logger.VERBOSE
113 case _ => Logger.NORMAL
114 }
115 parse(tail, config += ('logger -> level))
116 }
117 case flag @ ("-a" | "--answers") :: answers :: tail =>
118 parse(tail, config += ('answers -> getPath(answers)))
119 case flag @ ("-q" | "--queries") :: _query :: tail => {
120 val query = getPath(_query)
121 if (!os.isFile(query))
122 exit(s"'${_query}' is not a valid filename.")
123 parse(tail, config += ('queries -> query))
124 }
125 case flag @ ("-o" | "--ontology") :: _ontology :: tail => {
126 val ontology = getPath(_ontology)
127 if (!os.isFile(ontology))
128 exit(s"'${_ontology}' is not a valid filename.")
129 parse(tail, config += ('ontology -> ontology))
130 }
131 case flag @ ("-d" | "--data") :: _data :: tail => {
132 val data = getPath(_data)
133 val files =
134 if (os.isFile(data))
135 Seq(data)
136 else if (os.isDir(data))
137 os.walk(data).filter(os.isFile)
138 else
139 exit(s"'${_data}' is not a valid path.")
140 parse(tail, config += ('data -> files))
141 }
142 case a => exit(s"Invalid sequence of arguments '${a.mkString(" ")}'.")
143 }
144 }
145
146 /** Perform final checks on parsed options */
147 private def finalise(config: Config): Config = {
148 if (!config.contains('ontology))
149 exit("The following flag is mandatory: '-o' or '--ontology'.")
150 if (!config.contains('data))
151 config += ('data -> List.empty[os.Path])
152 config
153 }
154}
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
index 30e1305..1ff466b 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
@@ -27,6 +27,7 @@ import org.semanticweb.owlapi.model.{OWLOntology, OWLAxiom, OWLLogicalAxiom}
27import org.semanticweb.owlapi.model.{ 27import org.semanticweb.owlapi.model.{
28 OWLClass, 28 OWLClass,
29 OWLClassExpression, 29 OWLClassExpression,
30 OWLDataProperty,
30 OWLDataPropertyAssertionAxiom, 31 OWLDataPropertyAssertionAxiom,
31 OWLObjectProperty, 32 OWLObjectProperty,
32 OWLSubObjectPropertyOfAxiom, 33 OWLSubObjectPropertyOfAxiom,
@@ -46,17 +47,20 @@ import tech.oxfordsemantic.jrdfox.client.{
46} 47}
47import tech.oxfordsemantic.jrdfox.Prefixes 48import tech.oxfordsemantic.jrdfox.Prefixes
48import tech.oxfordsemantic.jrdfox.logic.datalog.{ 49import tech.oxfordsemantic.jrdfox.logic.datalog.{
50 BodyFormula,
51 FilterAtom,
52 Negation,
49 Rule, 53 Rule,
50 TupleTableAtom, 54 TupleTableAtom,
51 Negation, 55 TupleTableName
52 BodyFormula
53} 56}
54import tech.oxfordsemantic.jrdfox.logic.expression.{ 57import tech.oxfordsemantic.jrdfox.logic.expression.{
55 Term, 58 FunctionCall,
56 Variable,
57 IRI, 59 IRI,
60 Literal,
58 Resource, 61 Resource,
59 Literal 62 Term,
63 Variable
60} 64}
61import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery 65import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery
62 66
@@ -81,30 +85,6 @@ import uk.ac.ox.cs.rsacomb.util.{RDFoxUtil, RSA}
81import uk.ac.ox.cs.rsacomb.util.Logger 85import uk.ac.ox.cs.rsacomb.util.Logger
82import uk.ac.ox.cs.rsacomb.ontology.Ontology 86import uk.ac.ox.cs.rsacomb.ontology.Ontology
83 87
84object RSAUtil {
85
86 // implicit def axiomsToOntology(axioms: Seq[OWLAxiom]) = {
87 // val manager = OWLManager.createOWLOntologyManager()
88 // manager.createOntology(axioms.asJava)
89 // }
90
91 /** Manager instance to interface with OWLAPI */
92 val manager = OWLManager.createOWLOntologyManager()
93 val factory = manager.getOWLDataFactory()
94
95 /** Simple fresh variable/class generator */
96 private var counter = -1;
97 def genFreshVariable(): Variable = {
98 counter += 1
99 Variable.create(f"I$counter%05d")
100 }
101 def getFreshOWLClass(): OWLClass = {
102 counter += 1
103 factory.getOWLClass(s"X$counter")
104 }
105
106}
107
108object RSAOntology { 88object RSAOntology {
109 89
110 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ 90 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._
@@ -115,6 +95,20 @@ object RSAOntology {
115 /** Name of the RDFox data store used for CQ answering */ 95 /** Name of the RDFox data store used for CQ answering */
116 private val DataStore = "answer_computation" 96 private val DataStore = "answer_computation"
117 97
98 /** Canonical model named graph */
99 private val CanonGraph: IRI =
100 RDFoxUtil.getNamedGraph(DataStore, "CanonicalModel")
101
102 /** Filtering program named graph
103 *
104 * @param query query associated with the returned named graph.
105 *
106 * @return named graph for the filtering program associated with the
107 * input query.
108 */
109 private def FilterGraph(query: ConjunctiveQuery): IRI =
110 RDFoxUtil.getNamedGraph(DataStore, s"Filter${query.id}")
111
118 /** Filtering program for a given query 112 /** Filtering program for a given query
119 * 113 *
120 * @param query the query to derive the filtering program 114 * @param query the query to derive the filtering program
@@ -122,15 +116,19 @@ object RSAOntology {
122 */ 116 */
123 def filteringProgram(query: ConjunctiveQuery): FilteringProgram = 117 def filteringProgram(query: ConjunctiveQuery): FilteringProgram =
124 Logger.timed( 118 Logger.timed(
125 FilteringProgram(FilterType.REVISED)(query), 119 {
120 val filter = FilteringProgram(FilterType.REVISED)
121 filter(CanonGraph, FilterGraph(query), query)
122 },
126 "Generating filtering program", 123 "Generating filtering program",
127 Logger.DEBUG 124 Logger.DEBUG
128 ) 125 )
129 126
130 def apply( 127 def apply(
128 origin: OWLOntology,
131 axioms: List[OWLLogicalAxiom], 129 axioms: List[OWLLogicalAxiom],
132 datafiles: List[File] 130 datafiles: List[os.Path]
133 ): RSAOntology = new RSAOntology(axioms, datafiles) 131 ): RSAOntology = new RSAOntology(origin, axioms, datafiles)
134 132
135 // def apply( 133 // def apply(
136 // ontofile: File, 134 // ontofile: File,
@@ -197,8 +195,11 @@ object RSAOntology {
197 * @param ontology the input OWL2 ontology. 195 * @param ontology the input OWL2 ontology.
198 * @param datafiles additinal data (treated as part of the ABox) 196 * @param datafiles additinal data (treated as part of the ABox)
199 */ 197 */
200class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) 198class RSAOntology(
201 extends Ontology(axioms, datafiles) { 199 origin: OWLOntology,
200 axioms: List[OWLLogicalAxiom],
201 datafiles: List[os.Path]
202) extends Ontology(origin, axioms, datafiles) {
202 203
203 /** Simplify conversion between OWLAPI and RDFox concepts */ 204 /** Simplify conversion between OWLAPI and RDFox concepts */
204 import implicits.RDFox._ 205 import implicits.RDFox._
@@ -227,10 +228,9 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File])
227 /** Retrieve concepts/roles in the ontology */ 228 /** Retrieve concepts/roles in the ontology */
228 val concepts: List[OWLClass] = 229 val concepts: List[OWLClass] =
229 ontology.getClassesInSignature().asScala.toList 230 ontology.getClassesInSignature().asScala.toList
230 val roles: List[OWLObjectPropertyExpression] = 231 val objroles: List[OWLObjectPropertyExpression] =
231 axioms 232 axioms.flatMap(_.objectPropertyExpressionsInSignature).distinct
232 .flatMap(_.objectPropertyExpressionsInSignature) 233 val dataroles: List[OWLDataProperty] = origin.getDataPropertiesInSignature
233 .distinct
234 234
235 /** Unsafe roles of a given ontology. 235 /** Unsafe roles of a given ontology.
236 * 236 *
@@ -364,21 +364,32 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File])
364 private val topAxioms: List[Rule] = { 364 private val topAxioms: List[Rule] = {
365 val varX = Variable.create("X") 365 val varX = Variable.create("X")
366 val varY = Variable.create("Y") 366 val varY = Variable.create("Y")
367 concepts 367 val varZ = Variable.create("Z")
368 .map(c => { 368 val graph = TupleTableName.create(RSAOntology.CanonGraph.getIRI)
369 Rule.create( 369 Rule.create(
370 RSA.Thing(varX), 370 TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, IRI.THING),
371 TupleTableAtom.rdf(varX, IRI.RDF_TYPE, c.getIRI) 371 TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, varY)
372 ) 372 ) :: objroles.map(r => {
373 }) ++ roles.map(r => {
374 val name = r match { 373 val name = r match {
375 case x: OWLObjectProperty => x.getIRI.getIRIString 374 case x: OWLObjectProperty => x.getIRI.getIRIString
376 case x: OWLObjectInverseOf => 375 case x: OWLObjectInverseOf =>
377 x.getInverse.getNamedProperty.getIRI.getIRIString :: Inverse 376 x.getInverse.getNamedProperty.getIRI.getIRIString :: Inverse
378 } 377 }
379 Rule.create( 378 Rule.create(
380 List(RSA.Thing(varX), RSA.Thing(varY)), 379 List(
381 List(TupleTableAtom.rdf(varX, name, varY)) 380 TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, IRI.THING),
381 TupleTableAtom.create(graph, varY, IRI.RDF_TYPE, IRI.THING)
382 ),
383 List(TupleTableAtom.create(graph, varX, name, varY))
384 )
385 }) ::: dataroles.map(r => {
386 val name = r.getIRI.getIRIString
387 Rule.create(
388 List(
389 TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, IRI.THING),
390 TupleTableAtom.create(graph, varY, IRI.RDF_TYPE, IRI.THING)
391 ),
392 List(TupleTableAtom.create(graph, varX, name, varY))
382 ) 393 )
383 }) 394 })
384 } 395 }
@@ -403,23 +414,31 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File])
403 val varX = Variable.create("X") 414 val varX = Variable.create("X")
404 val varY = Variable.create("Y") 415 val varY = Variable.create("Y")
405 val varZ = Variable.create("Z") 416 val varZ = Variable.create("Z")
406 List( 417 val graph = TupleTableName.create(RSAOntology.CanonGraph.getIRI)
418 // Equality properties
419 val properties = List(
407 // Reflexivity 420 // Reflexivity
408 Rule.create(RSA.Congruent(varX, varX), RSA.Thing(varX)), 421 Rule.create(
422 TupleTableAtom.create(graph, varX, RSA.CONGRUENT, varX),
423 TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, IRI.THING)
424 ),
409 // Simmetry 425 // Simmetry
410 Rule.create(RSA.Congruent(varY, varX), RSA.Congruent(varX, varY)), 426 Rule.create(
427 TupleTableAtom.create(graph, varY, RSA.CONGRUENT, varX),
428 TupleTableAtom.create(graph, varX, RSA.CONGRUENT, varY)
429 ),
411 // Transitivity 430 // Transitivity
412 Rule.create( 431 Rule.create(
413 RSA.Congruent(varX, varZ), 432 TupleTableAtom.create(graph, varX, RSA.CONGRUENT, varZ),
414 RSA.Congruent(varX, varY), 433 TupleTableAtom.create(graph, varX, RSA.CONGRUENT, varY),
415 RSA.Congruent(varY, varZ) 434 TupleTableAtom.create(graph, varY, RSA.CONGRUENT, varZ)
416 ) 435 )
417 ) 436 )
418 } 437 }
419 438
420 /** Canonical model of the ontology */ 439 /** Canonical model of the ontology */
421 lazy val canonicalModel = Logger.timed( 440 lazy val canonicalModel = Logger.timed(
422 new CanonicalModel(this), 441 new CanonicalModel(this, RSAOntology.CanonGraph),
423 "Generating canonical model program", 442 "Generating canonical model program",
424 Logger.DEBUG 443 Logger.DEBUG
425 ) 444 )
@@ -520,73 +539,143 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File])
520 def unfold(axiom: OWLSubClassOfAxiom): Set[Term] = 539 def unfold(axiom: OWLSubClassOfAxiom): Set[Term] =
521 this.self(axiom) | this.cycle(axiom) 540 this.self(axiom) | this.cycle(axiom)
522 541
523 /** Returns the answers to a query 542 /** Returns the answers to a single query
524 * 543 *
525 * @param query query to execute 544 * @param query the conjunctive query to answer.
526 * @return a collection of answers 545 * @return the answers to the given query.
527 */ 546 */
528 def ask(query: ConjunctiveQuery): ConjunctiveQueryAnswers = Logger.timed( 547 def ask(query: ConjunctiveQuery): ConjunctiveQueryAnswers = this._ask(query)
529 {
530 val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore)
531 val canon = this.canonicalModel
532 val filter = RSAOntology.filteringProgram(query)
533 548
534 /* Upload data from data file */ 549 /** Returns the answers to a collection of queries
535 RDFoxUtil.addData(data, datafiles: _*) 550 *
536 551 * @param queries a sequence of conjunctive queries to answer.
537 RDFoxUtil printStatisticsFor data 552 * @return a collection of answers for each query.
553 */
554 def ask(queries: Seq[ConjunctiveQuery]): Seq[ConjunctiveQueryAnswers] =
555 queries map _ask
538 556
539 /* Top / equality axiomatization */ 557 private lazy val _ask: ConjunctiveQuery => ConjunctiveQueryAnswers = {
540 RDFoxUtil.addRules(data, topAxioms ++ equalityAxioms) 558 val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore)
541 559
542 /* Generate `named` predicates */ 560 /* Upload data from data file */
543 RDFoxUtil.addFacts(data, (individuals ++ literals) map RSA.Named) 561 RDFoxUtil.addData(data, RSAOntology.CanonGraph, datafiles: _*)
544 data.evaluateUpdate( 562
545 RSA.Prefixes, 563 /* Top/equality axiomatization */
546 "INSERT { ?X a rsa:Named } WHERE { ?X a owl:Thing }", 564 RDFoxUtil.addRules(data, topAxioms ++ equalityAxioms)
547 new java.util.HashMap[String, String] 565 Logger.write(topAxioms.mkString("\n"), "canonical_model.datalog")
548 ) 566 Logger.write(equalityAxioms.mkString("\n"), "canonical_model.datalog")
567
568 /* Introduce `rsacomb:Named` concept */
569 data.evaluateUpdate(
570 null, // the base IRI for the query (if null, a default is used)
571 RSA.Prefixes,
572 s"""
573 INSERT {
574 GRAPH ${RSAOntology.CanonGraph} { ?X a ${RSA.NAMED} }
575 } WHERE {
576 GRAPH ${RSAOntology.CanonGraph} { ?X a ${IRI.THING} }
577 }
578 """,
579 new java.util.HashMap[String, String]
580 )
549 581
550 /* Add canonical model */ 582 /* Add canonical model */
551 Logger print s"Canonical model rules: ${canon.rules.length}" 583 Logger print s"Canonical model facts: ${this.canonicalModel.facts.length}"
552 RDFoxUtil.addRules(data, canon.rules) 584 RDFoxUtil.addFacts(data, RSAOntology.CanonGraph, this.canonicalModel.facts)
585 Logger print s"Canonical model rules: ${this.canonicalModel.rules.length}"
586 Logger.write(canonicalModel.rules.mkString("\n"), "canonical_model.datalog")
587 RDFoxUtil.addRules(data, this.canonicalModel.rules)
553 588
554 Logger print s"Canonical model facts: ${canon.facts.length}" 589 RDFoxUtil.closeConnection(server, data)
555 RDFoxUtil.addFacts(data, canon.facts)
556 590
557 RDFoxUtil printStatisticsFor data 591 (query => {
592 val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore)
558 593
559 //{ 594 val filter = RSAOntology.filteringProgram(query)
560 // import java.io.{PrintStream, FileOutputStream, File}
561 // val rules1 = new FileOutputStream(new File("rules1-lubm200.dlog"))
562 // val facts1 = new FileOutputStream(new File("facts1-lubm200.ttl"))
563 // RDFoxUtil.export(data, rules1, facts1)
564 // val rules2 = new PrintStream(new File("rules2-q34.dlog"))
565 // rules2.print(filter.rules.mkString("\n"))
566 //}
567 595
568 /* Add filtering program */ 596 /* Add filtering program */
569 Logger print s"Filtering program rules: ${filter.rules.length}" 597 Logger print s"Filtering program rules: ${filter.rules.length}"
598 Logger.write(filter.rules.mkString("\n"), s"filter${query.id}.datalog")
570 RDFoxUtil.addRules(data, filter.rules) 599 RDFoxUtil.addRules(data, filter.rules)
571 600
572 RDFoxUtil printStatisticsFor data 601 // TODO: We remove the rules, should we drop the tuple table as well?
602 data.clearRulesAxiomsExplicateFacts()
573 603
574 /* Gather answers to the query */ 604 /* Gather answers to the query */
575 val answers = { 605 val answers = RDFoxUtil
576 val ans = filter.answerQuery 606 .submitQuery(data, filter.answerQuery, RSA.Prefixes)
577 RDFoxUtil 607 .map(new ConjunctiveQueryAnswers(query, query.variables, _))
578 .submitQuery(data, ans, RSA.Prefixes) 608 .get
579 .map(new ConjunctiveQueryAnswers(query.bcq, query.variables, _))
580 .get
581 }
582 609
583 RDFoxUtil.closeConnection(server, data) 610 RDFoxUtil.closeConnection(server, data)
584 611
585 answers 612 answers
586 }, 613 })
587 "Answers computation", 614 }
588 Logger.DEBUG 615
589 ) 616 //def ask(query: ConjunctiveQuery): ConjunctiveQueryAnswers = Logger.timed(
617 // {
618 // val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore)
619 // val canon = this.canonicalModel
620 // val filter = RSAOntology.filteringProgram(query)
621
622 // /* Upload data from data file */
623 // RDFoxUtil.addData(data, datafiles: _*)
624
625 // RDFoxUtil printStatisticsFor data
626
627 // /* Top / equality axiomatization */
628 // RDFoxUtil.addRules(data, topAxioms ++ equalityAxioms)
629
630 // /* Generate `named` predicates */
631 // RDFoxUtil.addFacts(data, (individuals ++ literals) map RSA.Named)
632 // data.evaluateUpdate(
633 // null, // the base IRI for the query (if null, a default is used)
634 // RSA.Prefixes,
635 // "INSERT { ?X a rsa:Named } WHERE { ?X a owl:Thing }",
636 // new java.util.HashMap[String, String]
637 // )
638
639 // /* Add canonical model */
640 // Logger print s"Canonical model rules: ${canon.rules.length}"
641 // RDFoxUtil.addRules(data, canon.rules)
642
643 // Logger print s"Canonical model facts: ${canon.facts.length}"
644 // RDFoxUtil.addFacts(data, canon.facts)
645
646 // RDFoxUtil printStatisticsFor data
647
648 // //{
649 // // import java.io.{PrintStream, FileOutputStream, File}
650 // // val rules1 = new FileOutputStream(new File("rules1-lubm200.dlog"))
651 // // val facts1 = new FileOutputStream(new File("facts1-lubm200.ttl"))
652 // // RDFoxUtil.export(data, rules1, facts1)
653 // // val rules2 = new PrintStream(new File("rules2-q34.dlog"))
654 // // rules2.print(filter.rules.mkString("\n"))
655 // //}
656
657 // /* Add filtering program */
658 // Logger print s"Filtering program rules: ${filter.rules.length}"
659 // RDFoxUtil.addRules(data, filter.rules)
660
661 // RDFoxUtil printStatisticsFor data
662
663 // /* Gather answers to the query */
664 // val answers = {
665 // val ans = filter.answerQuery
666 // RDFoxUtil
667 // .submitQuery(data, ans, RSA.Prefixes)
668 // .map(new ConjunctiveQueryAnswers(query, query.variables, _))
669 // .get
670 // }
671
672 // RDFoxUtil.closeConnection(server, data)
673
674 // answers
675 // },
676 // "Answers computation",
677 // Logger.DEBUG
678 //)
590 679
591 /** Query the RDFox data store used for query answering. 680 /** Query the RDFox data store used for query answering.
592 * 681 *
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala
index 60a88fb..e261bce 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala
@@ -13,10 +13,10 @@ import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._
13import scalax.collection.GraphTraversal._ 13import scalax.collection.GraphTraversal._
14 14
15import uk.ac.ox.cs.rsacomb.RSAOntology 15import uk.ac.ox.cs.rsacomb.RSAOntology
16import uk.ac.ox.cs.rsacomb.RSAUtil
17import uk.ac.ox.cs.rsacomb.ontology.Ontology 16import uk.ac.ox.cs.rsacomb.ontology.Ontology
17import uk.ac.ox.cs.rsacomb.util.DataFactory
18 18
19object LowerBound { 19object Lowerbound {
20 20
21 private val manager = OWLManager.createOWLOntologyManager() 21 private val manager = OWLManager.createOWLOntologyManager()
22 private val factory = manager.getOWLDataFactory() 22 private val factory = manager.getOWLDataFactory()
@@ -38,7 +38,8 @@ object LowerBound {
38 * 38 *
39 * @see [[uk.ac.ox.cs.rsacomb.converter.Normalizer]] 39 * @see [[uk.ac.ox.cs.rsacomb.converter.Normalizer]]
40 */ 40 */
41class LowerBound extends Approximation[RSAOntology] { 41class Lowerbound(implicit fresh: DataFactory)
42 extends Approximation[RSAOntology] {
42 43
43 /** Simplify conversion between Java and Scala collections */ 44 /** Simplify conversion between Java and Scala collections */
44 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ 45 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._
@@ -50,6 +51,7 @@ class LowerBound extends Approximation[RSAOntology] {
50 def approximate(ontology: Ontology): RSAOntology = 51 def approximate(ontology: Ontology): RSAOntology =
51 toRSA( 52 toRSA(
52 new Ontology( 53 new Ontology(
54 ontology.origin,
53 ontology.axioms filter inALCHOIQ flatMap shift, 55 ontology.axioms filter inALCHOIQ flatMap shift,
54 ontology.datafiles 56 ontology.datafiles
55 ) 57 )
@@ -122,27 +124,25 @@ class LowerBound extends Approximation[RSAOntology] {
122 val sup = a.getSuperClass.getNNF 124 val sup = a.getSuperClass.getNNF
123 sup match { 125 sup match {
124 case sup: OWLObjectUnionOf => { 126 case sup: OWLObjectUnionOf => {
125 val body = sub.asConjunctSet.map((atom) => 127 val body =
126 (atom, RSAUtil.getFreshOWLClass()) 128 sub.asConjunctSet.map((atom) => (atom, fresh.getOWLClass))
127 ) 129 val head =
128 val head = sup.asDisjunctSet.map((atom) => 130 sup.asDisjunctSet.map((atom) => (atom, fresh.getOWLClass))
129 (atom, RSAUtil.getFreshOWLClass())
130 )
131 131
132 val r1 = 132 val r1 =
133 LowerBound.factory.getOWLSubClassOfAxiom( 133 Lowerbound.factory.getOWLSubClassOfAxiom(
134 LowerBound.factory.getOWLObjectIntersectionOf( 134 Lowerbound.factory.getOWLObjectIntersectionOf(
135 (body.map(_._1) ++ head.map(_._2)): _* 135 (body.map(_._1) ++ head.map(_._2)): _*
136 ), 136 ),
137 LowerBound.factory.getOWLNothing 137 Lowerbound.factory.getOWLNothing
138 ) 138 )
139 139
140 val r2s = 140 val r2s =
141 for { 141 for {
142 (a, na) <- head 142 (a, na) <- head
143 hs = head.map(_._2).filterNot(_ equals na) 143 hs = head.map(_._2).filterNot(_ equals na)
144 } yield LowerBound.factory.getOWLSubClassOfAxiom( 144 } yield Lowerbound.factory.getOWLSubClassOfAxiom(
145 LowerBound.factory.getOWLObjectIntersectionOf( 145 Lowerbound.factory.getOWLObjectIntersectionOf(
146 (body.map(_._1) ++ hs): _* 146 (body.map(_._1) ++ hs): _*
147 ), 147 ),
148 a 148 a
@@ -152,8 +152,8 @@ class LowerBound extends Approximation[RSAOntology] {
152 for { 152 for {
153 (a, na) <- body 153 (a, na) <- body
154 bs = body.map(_._1).filterNot(_ equals a) 154 bs = body.map(_._1).filterNot(_ equals a)
155 } yield LowerBound.factory.getOWLSubClassOfAxiom( 155 } yield Lowerbound.factory.getOWLSubClassOfAxiom(
156 LowerBound.factory.getOWLObjectIntersectionOf( 156 Lowerbound.factory.getOWLObjectIntersectionOf(
157 (bs ++ head.map(_._2)): _* 157 (bs ++ head.map(_._2)): _*
158 ), 158 ),
159 na 159 na
@@ -219,7 +219,11 @@ class LowerBound extends Approximation[RSAOntology] {
219 }.toList 219 }.toList
220 220
221 /* Remove axioms from approximated ontology */ 221 /* Remove axioms from approximated ontology */
222 RSAOntology(ontology.axioms diff toDelete, ontology.datafiles) 222 RSAOntology(
223 ontology.origin,
224 ontology.axioms diff toDelete,
225 ontology.datafiles
226 )
223 } 227 }
224 228
225 // val edges1 = Seq('A ~> 'B, 'B ~> 'C, 'C ~> 'D, 'D ~> 'H, 'H ~> 229 // val edges1 = Seq('A ~> 'B, 'B ~> 'C, 'C ~> 'D, 'D ~> 'H, 'H ~>
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala
new file mode 100644
index 0000000..469d774
--- /dev/null
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala
@@ -0,0 +1,178 @@
1package uk.ac.ox.cs.rsacomb.approximation
2
3// import java.io.File
4
5import org.semanticweb.owlapi.apibinding.OWLManager
6import org.semanticweb.owlapi.model.{IRI => _, _}
7
8import tech.oxfordsemantic.jrdfox.logic.expression.{Resource, IRI}
9
10import scala.collection.mutable.Map
11import scalax.collection.Graph
12import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._
13import scalax.collection.GraphTraversal._
14
15import uk.ac.ox.cs.rsacomb.RSAOntology
16import uk.ac.ox.cs.rsacomb.ontology.Ontology
17import uk.ac.ox.cs.rsacomb.util.DataFactory
18
19object Upperbound {
20
21 private val manager = OWLManager.createOWLOntologyManager()
22 private val factory = manager.getOWLDataFactory()
23
24}
25
26/** Approximation algorithm that maintains completeness for CQ answering.
27 *
28 * The input OWL 2 ontology is assumed to be normalized and the output
29 * ontology is guaranteed to be in RSA.
30 *
31 * The algorithm is performed in three steps:
32 * 1. the ontology is reduced to ALCHOIQ by discarding any axiom
33 * that is not in the language;
34 * 2. the ontology is further reduced to Horn-ALCHOIQ by shifting
35 * axioms with disjunction on the rhs;
36 * 3. the ontology is approximated to RSA by manipulating its
37 * dependency graph.
38 *
39 * @see [[uk.ac.ox.cs.rsacomb.converter.Normalizer]]
40 */
41class Upperbound(implicit fresh: DataFactory)
42 extends Approximation[RSAOntology] {
43
44 /** Simplify conversion between Java and Scala collections */
45 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._
46
47 /** Simplify conversion between OWLAPI and RDFox concepts */
48 // import uk.ac.ox.cs.rsacomb.implicits.RDFox._
49
50 /** Main entry point for the approximation algorithm */
51 def approximate(ontology: Ontology): RSAOntology =
52 toRSA(
53 new Ontology(
54 ontology.origin,
55 ontology.axioms flatMap toConjuncts,
56 ontology.datafiles
57 )
58 )
59
60 /** Turn disjuncts into conjuncts
61 *
62 * This is a very naïve way of getting rid of disjunction preserving
63 * completeness of CQ answering.
64 *
65 * @todo implement a choice function that decides which disjunct to
66 * keep instead of keeping all of them. Note that PAGOdA is currently
67 * doing something similar.
68 */
69 private def toConjuncts(axiom: OWLLogicalAxiom): List[OWLLogicalAxiom] =
70 axiom match {
71 case a: OWLSubClassOfAxiom => {
72 val sub = a.getSubClass.getNNF
73 val sup = a.getSuperClass.getNNF
74 sup match {
75 case sup: OWLObjectUnionOf =>
76 sup.asDisjunctSet.map(
77 Upperbound.factory.getOWLSubClassOfAxiom(sub, _)
78 )
79 case _ => List(axiom)
80 }
81 }
82 case _ => List(axiom)
83 }
84
85 /** Approximate a Horn-ALCHOIQ ontology to RSA
86 *
87 * This is done by gathering those existential axioms that prevent
88 * the ontology dependency graph from being tree-shaped and constant
89 * skolemize them.
90 *
91 * @param ontology the set of axioms to approximate.
92 * @return the approximated RSA ontology
93 */
94 private def toRSA(ontology: Ontology): RSAOntology = {
95 /* Compute the dependency graph for the ontology */
96 val (graph, nodemap) = ontology.dependencyGraph
97
98 /* Define node colors for the graph visit */
99 sealed trait NodeColor
100 case object Unvisited extends NodeColor
101 case object Visited extends NodeColor
102 case object ToSkolem extends NodeColor
103
104 /* Keep track of node colors during graph visit */
105 var color = Map.from[Resource, NodeColor](
106 graph.nodes.toOuter.map(k => (k, Unvisited))
107 )
108
109 for {
110 component <- graph.componentTraverser().map(_ to Graph)
111 edge <- component
112 .outerEdgeTraverser(component.nodes.head)
113 .withKind(BreadthFirst)
114 } yield {
115 val source = edge._1
116 val target = edge._2
117 color(source) match {
118 case Unvisited | Visited => {
119 color(target) match {
120 case Unvisited =>
121 color(source) = Visited;
122 color(target) = Visited
123 case Visited =>
124 color(source) = ToSkolem
125 case ToSkolem =>
126 color(source) = Visited
127 }
128 }
129 case ToSkolem => {}
130 }
131 }
132
133 val toSkolem = color.collect { case (resource: IRI, ToSkolem) =>
134 nodemap(resource.getIRI)
135 }.toList
136
137 // Force constant skolemization by introducing a fresh individual
138 // (singleton class).
139 val skolemized = toSkolem flatMap { (axiom) =>
140 import uk.ac.ox.cs.rsacomb.implicits.RSAAxiom._
141 axiom.toTriple match {
142 case Some((subclass, role, filler)) => {
143 val skolem = Upperbound.factory.getOWLNamedIndividual(
144 s"i_${axiom.toString.hashCode}"
145 )
146 val cls = fresh.getOWLClass
147 List(
148 Upperbound.factory.getOWLSubClassOfAxiom(
149 subclass,
150 Upperbound.factory.getOWLObjectSomeValuesFrom(role, cls)
151 ),
152 Upperbound.factory.getOWLSubClassOfAxiom(
153 cls,
154 Upperbound.factory.getOWLObjectOneOf(skolem)
155 ),
156 Upperbound.factory.getOWLClassAssertionAxiom(filler, skolem)
157 )
158 }
159 case None => List()
160 }
161 }
162
163 /* Substitute selected axioms with their "skolemized" version */
164 RSAOntology(
165 ontology.origin,
166 ontology.axioms diff toSkolem concat skolemized,
167 ontology.datafiles
168 )
169 }
170
171 // val edges1 = Seq('A ~> 'B, 'B ~> 'C, 'C ~> 'D, 'D ~> 'H, 'H ~>
172 // 'G, 'G ~> 'F, 'E ~> 'A, 'E ~> 'F, 'B ~> 'E, 'F ~> 'G, 'B ~> 'F,
173 // 'C ~> 'G, 'D ~> 'C, 'H ~> 'D)
174 // val edges2 = Seq('I ~> 'M, 'I ~> 'L, 'L ~> 'N, 'M ~> 'N)
175 // val edges3 = Seq('P ~> 'O)
176 // val graph = Graph.from(edges = edges1 ++ edges2 ++ edges3)
177
178}
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala
index 285040e..33cb715 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala
@@ -19,9 +19,8 @@ package uk.ac.ox.cs.rsacomb.converter
19import org.semanticweb.owlapi.apibinding.OWLManager 19import org.semanticweb.owlapi.apibinding.OWLManager
20import org.semanticweb.owlapi.model._ 20import org.semanticweb.owlapi.model._
21 21
22import uk.ac.ox.cs.rsacomb.util.Logger 22import uk.ac.ox.cs.rsacomb.util.{Logger, DataFactory}
23import uk.ac.ox.cs.rsacomb.RSAOntology 23import uk.ac.ox.cs.rsacomb.RSAOntology
24import uk.ac.ox.cs.rsacomb.RSAUtil
25 24
26object Normalizer { 25object Normalizer {
27 26
@@ -43,23 +42,11 @@ class Normalizer() {
43 /** Simplify conversion between Java and Scala collections */ 42 /** Simplify conversion between Java and Scala collections */
44 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ 43 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._
45 44
46 /** Statistics */ 45 /** Normalizes a [[OWLLogicalAxiom]]
47 var discarded = 0
48 var shifted = 0
49
50 /** Normalizes a
51 * [[org.semanticweb.owlapi.model.OWLLogicalAxiom OWLLogicalAxiom]]
52 *
53 * @note not all possible axioms are supported. Following is a list
54 * of all unhandled class expressions:
55 * - [[org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom OWLAsymmetricObjectPropertyAxiom]]
56 * - [[org.semanticweb.owlapi.model.OWLDatatypeDefinitionAxiom OWLDatatypeDefinitionAxiom]]
57 * - [[org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom OWLDisjointDataPropertiesAxiom]]
58 * - [[org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom OWLDisjointObjectPropertiesAxiom]]
59 * - [[org.semanticweb.owlapi.model.OWLHasKeyAxiom OWLHasKeyAxiom]]
60 * - [[org.semanticweb.owlapi.model.SWRLRule SWRLRule]]
61 */ 46 */
62 def normalize(axiom: OWLLogicalAxiom): Seq[OWLLogicalAxiom] = 47 def normalize(
48 axiom: OWLLogicalAxiom
49 )(implicit fresh: DataFactory): Seq[OWLLogicalAxiom] =
63 axiom match { 50 axiom match {
64 case a: OWLSubClassOfAxiom => { 51 case a: OWLSubClassOfAxiom => {
65 val sub = a.getSubClass.getNNF 52 val sub = a.getSubClass.getNNF
@@ -70,11 +57,11 @@ class Normalizer() {
70 * C c D -> { C c X, X c D } 57 * C c D -> { C c X, X c D }
71 */ 58 */
72 case _ if !sub.isOWLClass && !sup.isOWLClass => { 59 case _ if !sub.isOWLClass && !sup.isOWLClass => {
73 val cls = RSAUtil.getFreshOWLClass() 60 val cls = fresh.getOWLClass
74 Seq( 61 Seq(
75 factory.getOWLSubClassOfAxiom(sub, cls), 62 factory.getOWLSubClassOfAxiom(sub, cls),
76 factory.getOWLSubClassOfAxiom(cls, sup) 63 factory.getOWLSubClassOfAxiom(cls, sup)
77 ).flatMap(normalize) 64 ).flatMap(normalize(_)(fresh))
78 } 65 }
79 /** Conjunction on the lhs 66 /** Conjunction on the lhs
80 * 67 *
@@ -91,7 +78,7 @@ class Normalizer() {
91 if (conj.isOWLClass) 78 if (conj.isOWLClass)
92 (acc1 :+ conj, acc2) 79 (acc1 :+ conj, acc2)
93 else { 80 else {
94 val cls = RSAUtil.getFreshOWLClass() 81 val cls = fresh.getOWLClass
95 ( 82 (
96 acc1 :+ cls, 83 acc1 :+ cls,
97 acc2 :+ factory.getOWLSubClassOfAxiom(conj, cls) 84 acc2 :+ factory.getOWLSubClassOfAxiom(conj, cls)
@@ -103,9 +90,11 @@ class Normalizer() {
103 factory.getOWLObjectIntersectionOf(acc1: _*), 90 factory.getOWLObjectIntersectionOf(acc1: _*),
104 sup 91 sup
105 )) 92 ))
106 .flatMap(normalize) 93 .flatMap(normalize(_)(fresh))
107 } else { 94 } else {
108 normalize(factory.getOWLSubClassOfAxiom(factory.getOWLThing, sup)) 95 normalize(
96 factory.getOWLSubClassOfAxiom(factory.getOWLThing, sup)
97 )(fresh)
109 } 98 }
110 } 99 }
111 /** Conjunction on the rhs 100 /** Conjunction on the rhs
@@ -117,9 +106,11 @@ class Normalizer() {
117 if (conjuncts.length > 0) { 106 if (conjuncts.length > 0) {
118 conjuncts 107 conjuncts
119 .map(cls => factory.getOWLSubClassOfAxiom(sub, cls)) 108 .map(cls => factory.getOWLSubClassOfAxiom(sub, cls))
120 .flatMap(normalize) 109 .flatMap(normalize(_)(fresh))
121 } else { 110 } else {
122 normalize(factory.getOWLSubClassOfAxiom(sub, factory.getOWLThing)) 111 normalize(
112 factory.getOWLSubClassOfAxiom(sub, factory.getOWLThing)
113 )(fresh)
123 } 114 }
124 } 115 }
125 /** Disjunction on the lhs 116 /** Disjunction on the lhs
@@ -131,33 +122,61 @@ class Normalizer() {
131 if (disjuncts.length > 0) { 122 if (disjuncts.length > 0) {
132 disjuncts 123 disjuncts
133 .map(cls => factory.getOWLSubClassOfAxiom(cls, sup)) 124 .map(cls => factory.getOWLSubClassOfAxiom(cls, sup))
134 .flatMap(normalize) 125 .flatMap(normalize(_)(fresh))
135 } else { 126 } else {
136 normalize( 127 normalize(
137 factory.getOWLSubClassOfAxiom(factory.getOWLNothing, sup) 128 factory.getOWLSubClassOfAxiom(factory.getOWLNothing, sup)
138 ) 129 )(fresh)
139 } 130 }
140 } 131 }
141 /** Disjunction on the rhs is not supported directly 132 /** Disjunction on the rhs
142 * 133 *
143 * Instead we `shift` the rule to eliminate the disjunction. 134 * B c A1 u ... u C u ... u An -> { X c C, B c A1 u ... u X u ... u An }
144 */ 135 */
145 case (_, sup: OWLObjectUnionOf) => 136 case (_, sup: OWLObjectUnionOf)
146 shift(sub, sup) flatMap normalize 137 if sup.asDisjunctSet.exists(c => !c.isOWLClass) => {
138 var additional = Seq()
139 val disjuncts = sup.asDisjunctSet
140 // BUG: why test for length if this branch gets triggered only
141 // when there exists a ClassExpression in the disjuncts?
142 if (disjuncts.length > 0) {
143 val acc = (Seq[OWLClassExpression](), Seq[OWLLogicalAxiom]())
144 val (acc1, acc2) = disjuncts.foldLeft(acc)(
145 {
146 case ((acc1, acc2), disj: OWLClass) => (acc1 :+ disj, acc2)
147 case ((acc1, acc2), disj) => {
148 val cls = fresh.getOWLClass
149 (
150 acc1 :+ cls,
151 acc2 :+ factory.getOWLSubClassOfAxiom(cls, disj)
152 )
153 }
154 }
155 )
156 (acc2 :+ factory.getOWLSubClassOfAxiom(
157 sub,
158 factory.getOWLObjectUnionOf(acc1: _*)
159 )).flatMap(normalize(_)(fresh))
160 } else {
161 normalize(
162 factory.getOWLSubClassOfAxiom(sub, factory.getOWLNothing)
163 )(fresh)
164 }
165 }
147 /** Complex class expression on existential restriction on the lhs 166 /** Complex class expression on existential restriction on the lhs
148 * 167 *
149 * exists R . C c D -> { C c X, exists R . X c D } 168 * exists R . C c D -> { C c X, exists R . X c D }
150 */ 169 */
151 case (sub: OWLObjectSomeValuesFrom, _) 170 case (sub: OWLObjectSomeValuesFrom, _)
152 if !sub.getFiller.isOWLClass => { 171 if !sub.getFiller.isOWLClass => {
153 val cls = RSAUtil.getFreshOWLClass() 172 val cls = fresh.getOWLClass
154 Seq( 173 Seq(
155 factory.getOWLSubClassOfAxiom(sub.getFiller, cls), 174 factory.getOWLSubClassOfAxiom(sub.getFiller, cls),
156 factory.getOWLSubClassOfAxiom( 175 factory.getOWLSubClassOfAxiom(
157 factory.getOWLObjectSomeValuesFrom(sub.getProperty, cls), 176 factory.getOWLObjectSomeValuesFrom(sub.getProperty, cls),
158 sup 177 sup
159 ) 178 )
160 ).flatMap(normalize) 179 ).flatMap(normalize(_)(fresh))
161 } 180 }
162 /** Complex class expression on existential restriction on the rhs 181 /** Complex class expression on existential restriction on the rhs
163 * 182 *
@@ -165,18 +184,45 @@ class Normalizer() {
165 */ 184 */
166 case (_, sup: OWLObjectSomeValuesFrom) 185 case (_, sup: OWLObjectSomeValuesFrom)
167 if !sup.getFiller.isOWLClass => { 186 if !sup.getFiller.isOWLClass => {
168 val cls = RSAUtil.getFreshOWLClass() 187 val cls = fresh.getOWLClass
169 Seq( 188 Seq(
170 factory.getOWLSubClassOfAxiom(cls, sup.getFiller), 189 factory.getOWLSubClassOfAxiom(cls, sup.getFiller),
171 factory.getOWLSubClassOfAxiom( 190 factory.getOWLSubClassOfAxiom(
172 sub, 191 sub,
173 factory.getOWLObjectSomeValuesFrom(sup.getProperty, cls) 192 factory.getOWLObjectSomeValuesFrom(sup.getProperty, cls)
174 ) 193 )
175 ).flatMap(normalize) 194 ).flatMap(normalize(_)(fresh))
195 }
196 /** Object universal quantification on the lhs
197 *
198 * forall R . B c A
199 * ¬ A c ¬ forall R . B
200 * ¬ A c exists R . ¬ B
201 * ¬ A c C, C c exists R . ¬ B
202 * top c A u C, D c ¬ B, C c exists R . D
203 * top c A u C, D n B c bot, C c exists R . D
204 */
205 case (sub: OWLObjectAllValuesFrom, _) => {
206 val role = sub.getProperty
207 val filler = sub.getFiller
208 val (c, d) = (fresh.getOWLClass, fresh.getOWLClass)
209 Seq(
210 factory.getOWLSubClassOfAxiom(
211 factory.getOWLThing,
212 factory.getOWLObjectUnionOf(sup, c)
213 ),
214 factory.getOWLSubClassOfAxiom(
215 factory.getOWLObjectIntersectionOf(d, filler),
216 factory.getOWLNothing
217 ),
218 factory.getOWLSubClassOfAxiom(
219 c,
220 factory.getOWLObjectSomeValuesFrom(role, d)
221 )
222 )
176 } 223 }
177 /** Object/Data universal quantification on the lhs not supported */ 224 /** Object/Data universal quantification on the lhs */
178 case (sub: OWLObjectAllValuesFrom, _) => notInHornALCHOIQ(a) 225 case (sub: OWLDataAllValuesFrom, _) => notSupported(a)
179 case (sub: OWLDataAllValuesFrom, _) => notInHornALCHOIQ(a)
180 /** Object universal quantification on the rhs 226 /** Object universal quantification on the rhs
181 * 227 *
182 * C c forall R . D -> exists R- . C c D 228 * C c forall R . D -> exists R- . C c D
@@ -191,9 +237,9 @@ class Normalizer() {
191 ), 237 ),
192 sup.getFiller 238 sup.getFiller
193 ) 239 )
194 ) 240 )(fresh)
195 /** Object universal quantification on the rhs not supported */ 241 /** Data universal quantification on the rhs not supported */
196 case (_, sup: OWLDataAllValuesFrom) => notInHornALCHOIQ(a) 242 case (_, sup: OWLDataAllValuesFrom) => notSupported(a)
197 /** Exact object/data cardinality restriction on the lhs/rhs 243 /** Exact object/data cardinality restriction on the lhs/rhs
198 * 244 *
199 * = i R . C -> <= i R . C n >= i R . X 245 * = i R . C -> <= i R . C n >= i R . X
@@ -201,19 +247,19 @@ class Normalizer() {
201 case (sub: OWLObjectExactCardinality, _) => 247 case (sub: OWLObjectExactCardinality, _) =>
202 normalize( 248 normalize(
203 factory.getOWLSubClassOfAxiom(sub.asIntersectionOfMinMax, sup) 249 factory.getOWLSubClassOfAxiom(sub.asIntersectionOfMinMax, sup)
204 ) 250 )(fresh)
205 case (sub: OWLDataExactCardinality, _) => 251 case (sub: OWLDataExactCardinality, _) =>
206 normalize( 252 normalize(
207 factory.getOWLSubClassOfAxiom(sub.asIntersectionOfMinMax, sup) 253 factory.getOWLSubClassOfAxiom(sub.asIntersectionOfMinMax, sup)
208 ) 254 )(fresh)
209 case (_, sup: OWLObjectExactCardinality) => 255 case (_, sup: OWLObjectExactCardinality) =>
210 normalize( 256 normalize(
211 factory.getOWLSubClassOfAxiom(sub, sup.asIntersectionOfMinMax) 257 factory.getOWLSubClassOfAxiom(sub, sup.asIntersectionOfMinMax)
212 ) 258 )(fresh)
213 case (_, sup: OWLDataExactCardinality) => 259 case (_, sup: OWLDataExactCardinality) =>
214 normalize( 260 normalize(
215 factory.getOWLSubClassOfAxiom(sub, sup.asIntersectionOfMinMax) 261 factory.getOWLSubClassOfAxiom(sub, sup.asIntersectionOfMinMax)
216 ) 262 )(fresh)
217 /** Min object/data cardinality restriction on the lhs/rhs 263 /** Min object/data cardinality restriction on the lhs/rhs
218 * 264 *
219 * >= 0 R . C -> top 265 * >= 0 R . C -> top
@@ -226,7 +272,7 @@ class Normalizer() {
226 case 0 => 272 case 0 =>
227 normalize( 273 normalize(
228 factory.getOWLSubClassOfAxiom(factory.getOWLThing, sup) 274 factory.getOWLSubClassOfAxiom(factory.getOWLThing, sup)
229 ) 275 )(fresh)
230 case 1 => 276 case 1 =>
231 normalize( 277 normalize(
232 factory.getOWLSubClassOfAxiom( 278 factory.getOWLSubClassOfAxiom(
@@ -236,15 +282,15 @@ class Normalizer() {
236 ), 282 ),
237 sup 283 sup
238 ) 284 )
239 ) 285 )(fresh)
240 case _ => notInHornALCHOIQ(a) 286 case _ => notSupported(a)
241 } 287 }
242 case (sub: OWLDataMinCardinality, _) => 288 case (sub: OWLDataMinCardinality, _) =>
243 sub.getCardinality match { 289 sub.getCardinality match {
244 case 0 => 290 case 0 =>
245 normalize( 291 normalize(
246 factory.getOWLSubClassOfAxiom(factory.getOWLThing, sup) 292 factory.getOWLSubClassOfAxiom(factory.getOWLThing, sup)
247 ) 293 )(fresh)
248 case 1 => 294 case 1 =>
249 normalize( 295 normalize(
250 factory.getOWLSubClassOfAxiom( 296 factory.getOWLSubClassOfAxiom(
@@ -254,8 +300,8 @@ class Normalizer() {
254 ), 300 ),
255 sup 301 sup
256 ) 302 )
257 ) 303 )(fresh)
258 case _ => notInHornALCHOIQ(a) 304 case _ => notSupported(a)
259 } 305 }
260 case (_, sup: OWLObjectMinCardinality) => 306 case (_, sup: OWLObjectMinCardinality) =>
261 sup.getCardinality match { 307 sup.getCardinality match {
@@ -269,8 +315,8 @@ class Normalizer() {
269 sup.getFiller 315 sup.getFiller
270 ) 316 )
271 ) 317 )
272 ) 318 )(fresh)
273 case _ => notInHornALCHOIQ(a) 319 case _ => notSupported(a)
274 } 320 }
275 case (_, sup: OWLDataMinCardinality) => 321 case (_, sup: OWLDataMinCardinality) =>
276 sup.getCardinality match { 322 sup.getCardinality match {
@@ -284,12 +330,12 @@ class Normalizer() {
284 sup.getFiller 330 sup.getFiller
285 ) 331 )
286 ) 332 )
287 ) 333 )(fresh)
288 case _ => notInHornALCHOIQ(a) 334 case _ => notSupported(a)
289 } 335 }
290 /** Max object/data cardinality restriction on the lhs not supported */ 336 /** Max object/data cardinality restriction on the lhs not supported */
291 case (sub: OWLObjectMaxCardinality, _) => notInHornALCHOIQ(a) 337 case (sub: OWLObjectMaxCardinality, _) => notSupported(a)
292 case (sub: OWLDataMaxCardinality, _) => notInHornALCHOIQ(a) 338 case (sub: OWLDataMaxCardinality, _) => notSupported(a)
293 /** Max object/data cardinality restriction on the rhs 339 /** Max object/data cardinality restriction on the rhs
294 * 340 *
295 * C c <= 0 R . D -> C n exists R . D -> bot 341 * C c <= 0 R . D -> C n exists R . D -> bot
@@ -307,20 +353,20 @@ class Normalizer() {
307 ), 353 ),
308 factory.getOWLNothing 354 factory.getOWLNothing
309 ) 355 )
310 ) 356 )(fresh)
311 case (_, sup: OWLObjectMaxCardinality) 357 case (_, sup: OWLObjectMaxCardinality)
312 if sup.getCardinality == 1 && !sup.getFiller.isOWLClass => { 358 if sup.getCardinality == 1 && !sup.getFiller.isOWLClass => {
313 val cls = RSAUtil.getFreshOWLClass() 359 val cls = fresh.getOWLClass
314 Seq( 360 Seq(
315 factory.getOWLSubClassOfAxiom(cls, sup.getFiller), 361 factory.getOWLSubClassOfAxiom(cls, sup.getFiller),
316 factory.getOWLSubClassOfAxiom( 362 factory.getOWLSubClassOfAxiom(
317 sub, 363 sub,
318 factory.getOWLObjectMaxCardinality(1, sup.getProperty, cls) 364 factory.getOWLObjectMaxCardinality(1, sup.getProperty, cls)
319 ) 365 )
320 ).flatMap(normalize) 366 ).flatMap(normalize(_)(fresh))
321 } 367 }
322 case (_, sup: OWLObjectMaxCardinality) if sup.getCardinality >= 2 => 368 case (_, sup: OWLObjectMaxCardinality) if sup.getCardinality >= 2 =>
323 notInHornALCHOIQ(a) 369 notSupported(a)
324 case (_, sup: OWLDataMaxCardinality) if sup.getCardinality == 0 => 370 case (_, sup: OWLDataMaxCardinality) if sup.getCardinality == 0 =>
325 normalize( 371 normalize(
326 factory.getOWLSubClassOfAxiom( 372 factory.getOWLSubClassOfAxiom(
@@ -331,9 +377,9 @@ class Normalizer() {
331 ), 377 ),
332 factory.getOWLNothing 378 factory.getOWLNothing
333 ) 379 )
334 ) 380 )(fresh)
335 case (_, sup: OWLDataMaxCardinality) if sup.getCardinality >= 1 => 381 case (_, sup: OWLDataMaxCardinality) if sup.getCardinality >= 1 =>
336 notInHornALCHOIQ(a) 382 notSupported(a)
337 /** HasValue expression on the lhs/rhs 383 /** HasValue expression on the lhs/rhs
338 * 384 *
339 * HasValue(R, a) -> exists R . {a} 385 * HasValue(R, a) -> exists R . {a}
@@ -347,7 +393,7 @@ class Normalizer() {
347 ), 393 ),
348 sup 394 sup
349 ) 395 )
350 ) 396 )(fresh)
351 case (sub: OWLDataHasValue, _) => 397 case (sub: OWLDataHasValue, _) =>
352 normalize( 398 normalize(
353 factory.getOWLSubClassOfAxiom( 399 factory.getOWLSubClassOfAxiom(
@@ -357,7 +403,7 @@ class Normalizer() {
357 ), 403 ),
358 sup 404 sup
359 ) 405 )
360 ) 406 )(fresh)
361 case (_, sup: OWLObjectHasValue) => 407 case (_, sup: OWLObjectHasValue) =>
362 normalize( 408 normalize(
363 factory.getOWLSubClassOfAxiom( 409 factory.getOWLSubClassOfAxiom(
@@ -367,7 +413,7 @@ class Normalizer() {
367 factory.getOWLObjectOneOf(sup.getFiller) 413 factory.getOWLObjectOneOf(sup.getFiller)
368 ) 414 )
369 ) 415 )
370 ) 416 )(fresh)
371 case (_, sup: OWLDataHasValue) => 417 case (_, sup: OWLDataHasValue) =>
372 normalize( 418 normalize(
373 factory.getOWLSubClassOfAxiom( 419 factory.getOWLSubClassOfAxiom(
@@ -377,7 +423,7 @@ class Normalizer() {
377 factory.getOWLDataOneOf(sup.getFiller) 423 factory.getOWLDataOneOf(sup.getFiller)
378 ) 424 )
379 ) 425 )
380 ) 426 )(fresh)
381 /** Enumeration of individuals on the lhs 427 /** Enumeration of individuals on the lhs
382 * 428 *
383 * {a1, ... ,an} c D -> { D(a1), ..., D(an) } 429 * {a1, ... ,an} c D -> { D(a1), ..., D(an) }
@@ -385,23 +431,29 @@ class Normalizer() {
385 case (sub: OWLObjectOneOf, _) => 431 case (sub: OWLObjectOneOf, _) =>
386 sub.getIndividuals.map(factory.getOWLClassAssertionAxiom(sup, _)) 432 sub.getIndividuals.map(factory.getOWLClassAssertionAxiom(sup, _))
387 /** Enumeration of individuals on the rhs 433 /** Enumeration of individuals on the rhs
388 * It's supported only when of cardinality < 2. 434 *
435 * A c {a1, ... ,an} -> { A c {a1} u ... u {an} }
389 */ 436 */
390 case (_, sup: OWLObjectOneOf) if sup.getIndividuals.length == 0 =>
391 normalize(factory.getOWLSubClassOfAxiom(sub, factory.getOWLNothing))
392 case (_, sup: OWLObjectOneOf) if sup.getIndividuals.length > 2 => 437 case (_, sup: OWLObjectOneOf) if sup.getIndividuals.length > 2 =>
393 notInHornALCHOIQ(a) 438 normalize(
439 factory.getOWLSubClassOfAxiom(
440 sub,
441 factory.getOWLObjectUnionOf(
442 sup.getIndividuals.map(factory.getOWLObjectOneOf(_))
443 )
444 )
445 )(fresh)
394 /** Class complement on the lhs 446 /** Class complement on the lhs
395 * 447 *
396 * ~C c D -> top c C n D 448 * ~C c D -> top c C u D
397 */ 449 */
398 case (sub: OWLObjectComplementOf, _) => 450 case (sub: OWLObjectComplementOf, _) =>
399 normalize( 451 normalize(
400 factory.getOWLSubClassOfAxiom( 452 factory.getOWLSubClassOfAxiom(
401 factory.getOWLThing, 453 factory.getOWLThing,
402 factory.getOWLObjectIntersectionOf(sub.getComplementNNF, sup) 454 factory.getOWLObjectUnionOf(sub.getComplementNNF, sup)
403 ) 455 )
404 ) 456 )(fresh)
405 /** Class complement on the rhs 457 /** Class complement on the rhs
406 * 458 *
407 * C c ~D -> C n D c bot 459 * C c ~D -> C n D c bot
@@ -412,10 +464,10 @@ class Normalizer() {
412 factory.getOWLObjectIntersectionOf(sup.getComplementNNF, sub), 464 factory.getOWLObjectIntersectionOf(sup.getComplementNNF, sub),
413 factory.getOWLNothing 465 factory.getOWLNothing
414 ) 466 )
415 ) 467 )(fresh)
416 /** Self-restriction over an object property */ 468 /** Self-restriction over an object property */
417 case (sub: OWLObjectHasSelf, _) => notInHornALCHOIQ(a) 469 case (sub: OWLObjectHasSelf, _) => notSupported(a)
418 case (_, sup: OWLObjectHasSelf) => notInHornALCHOIQ(a) 470 case (_, sup: OWLObjectHasSelf) => notSupported(a)
419 471
420 /** Axiom is already normalized */ 472 /** Axiom is already normalized */
421 case _ => Seq(a) 473 case _ => Seq(a)
@@ -423,32 +475,34 @@ class Normalizer() {
423 } 475 }
424 476
425 case a: OWLEquivalentClassesAxiom => { 477 case a: OWLEquivalentClassesAxiom => {
426 a.getAxiomWithoutAnnotations.asOWLSubClassOfAxioms.flatMap(normalize) 478 a.getAxiomWithoutAnnotations.asOWLSubClassOfAxioms.flatMap(
479 normalize(_)(fresh)
480 )
427 } 481 }
428 482
429 case a: OWLEquivalentObjectPropertiesAxiom => { 483 case a: OWLEquivalentObjectPropertiesAxiom => {
430 a.getAxiomWithoutAnnotations.asSubObjectPropertyOfAxioms.flatMap( 484 a.getAxiomWithoutAnnotations.asSubObjectPropertyOfAxioms.flatMap(
431 normalize 485 normalize(_)(fresh)
432 ) 486 )
433 } 487 }
434 488
435 case a: OWLEquivalentDataPropertiesAxiom => { 489 case a: OWLEquivalentDataPropertiesAxiom => {
436 a.getAxiomWithoutAnnotations.asSubDataPropertyOfAxioms.flatMap( 490 a.getAxiomWithoutAnnotations.asSubDataPropertyOfAxioms.flatMap(
437 normalize 491 normalize(_)(fresh)
438 ) 492 )
439 } 493 }
440 494
441 case a: OWLObjectPropertyDomainAxiom => 495 case a: OWLObjectPropertyDomainAxiom =>
442 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) 496 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh)
443 497
444 case a: OWLObjectPropertyRangeAxiom => 498 case a: OWLObjectPropertyRangeAxiom =>
445 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) 499 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh)
446 500
447 case a: OWLDataPropertyDomainAxiom => 501 case a: OWLDataPropertyDomainAxiom =>
448 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) 502 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh)
449 503
450 case a: OWLDataPropertyRangeAxiom => 504 case a: OWLDataPropertyRangeAxiom =>
451 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) 505 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh)
452 506
453 case a: OWLDisjointClassesAxiom => 507 case a: OWLDisjointClassesAxiom =>
454 a.asPairwiseAxioms.map((a) => { 508 a.asPairwiseAxioms.map((a) => {
@@ -461,20 +515,22 @@ class Normalizer() {
461 515
462 case a: OWLInverseObjectPropertiesAxiom => 516 case a: OWLInverseObjectPropertiesAxiom =>
463 a.getAxiomWithoutAnnotations.asSubObjectPropertyOfAxioms.flatMap( 517 a.getAxiomWithoutAnnotations.asSubObjectPropertyOfAxioms.flatMap(
464 normalize 518 normalize(_)(fresh)
465 ) 519 )
466 520
467 case a: OWLFunctionalObjectPropertyAxiom => 521 case a: OWLFunctionalObjectPropertyAxiom =>
468 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) 522 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh)
469 523
470 case a: OWLFunctionalDataPropertyAxiom => 524 case a: OWLFunctionalDataPropertyAxiom =>
471 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) 525 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh)
472 526
473 case a: OWLInverseFunctionalObjectPropertyAxiom => 527 case a: OWLInverseFunctionalObjectPropertyAxiom =>
474 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) 528 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh)
475 529
476 case a: OWLSymmetricObjectPropertyAxiom => 530 case a: OWLSymmetricObjectPropertyAxiom =>
477 a.getAxiomWithoutAnnotations.asSubPropertyAxioms.flatMap(normalize) 531 a.getAxiomWithoutAnnotations.asSubPropertyAxioms.flatMap(
532 normalize(_)(fresh)
533 )
478 534
479 case a: OWLDifferentIndividualsAxiom => 535 case a: OWLDifferentIndividualsAxiom =>
480 a.asPairwiseAxioms.map((a) => { 536 a.asPairwiseAxioms.map((a) => {
@@ -486,44 +542,46 @@ class Normalizer() {
486 }) 542 })
487 543
488 case a: OWLIrreflexiveObjectPropertyAxiom => 544 case a: OWLIrreflexiveObjectPropertyAxiom =>
489 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) 545 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh)
490 546
491 case a: OWLSameIndividualAxiom => 547 case a: OWLSameIndividualAxiom =>
492 a.getAxiomWithoutAnnotations.asOWLSubClassOfAxioms.flatMap(normalize) 548 a.getAxiomWithoutAnnotations.asOWLSubClassOfAxioms.flatMap(
549 normalize(_)(fresh)
550 )
493 551
494 case a: OWLDisjointUnionAxiom => 552 case a: OWLDisjointUnionAxiom =>
495 Seq(a.getOWLDisjointClassesAxiom, a.getOWLEquivalentClassesAxiom) 553 Seq(a.getOWLDisjointClassesAxiom, a.getOWLEquivalentClassesAxiom)
496 .flatMap(normalize) 554 .flatMap(normalize(_)(fresh))
497 555
498 /** Complex class assertion 556 /** Complex class assertion
499 * 557 *
500 * C(a) -> { X(a), X c C } 558 * C(a) -> { X(a), X c C }
501 */ 559 */
502 case a: OWLClassAssertionAxiom if !a.getClassExpression.isOWLClass => { 560 case a: OWLClassAssertionAxiom if !a.getClassExpression.isOWLClass => {
503 val cls = RSAUtil.getFreshOWLClass() 561 val cls = fresh.getOWLClass
504 Seq( 562 Seq(
505 factory.getOWLClassAssertionAxiom(cls, a.getIndividual), 563 factory.getOWLClassAssertionAxiom(cls, a.getIndividual),
506 factory.getOWLSubClassOfAxiom(cls, a.getClassExpression) 564 factory.getOWLSubClassOfAxiom(cls, a.getClassExpression)
507 ).flatMap(normalize) 565 ).flatMap(normalize(_)(fresh))
508 } 566 }
509 567
510 case a: OWLNegativeObjectPropertyAssertionAxiom => 568 case a: OWLNegativeObjectPropertyAssertionAxiom =>
511 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) 569 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh)
512 570
513 case a: OWLNegativeDataPropertyAssertionAxiom => 571 case a: OWLNegativeDataPropertyAssertionAxiom =>
514 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) 572 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh)
515 573
516 /** Not in Horn-ALCHOIQ */ 574 case a: OWLTransitiveObjectPropertyAxiom => {
517 575 val role = a.getProperty
518 case a: OWLTransitiveObjectPropertyAxiom => notInHornALCHOIQ(a) 576 normalize(
519 577 factory.getOWLSubPropertyChainOfAxiom(List(role, role), role)
520 case a: OWLReflexiveObjectPropertyAxiom => notInHornALCHOIQ(a) 578 )(fresh)
521 579 }
522 case a: OWLSubPropertyChainOfAxiom => notInHornALCHOIQ(a)
523 580
524 /** Unsupported */ 581 case a: OWLReflexiveObjectPropertyAxiom =>
582 normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh)
525 583
526 case a: OWLAsymmetricObjectPropertyAxiom => notInHornALCHOIQ(a) 584 case a: OWLAsymmetricObjectPropertyAxiom => notSupported(a)
527 585
528 case a: OWLDatatypeDefinitionAxiom => notSupported(a) 586 case a: OWLDatatypeDefinitionAxiom => notSupported(a)
529 587
@@ -536,75 +594,19 @@ class Normalizer() {
536 case a: SWRLRule => notSupported(a) 594 case a: SWRLRule => notSupported(a)
537 595
538 /** Axiom is already normalized */ 596 /** Axiom is already normalized */
597 //case a: OWLSubPropertyChainOfAxiom => notSupported(a)
539 case a => Seq(a) 598 case a => Seq(a)
540 } 599 }
541 600
542 /** Shift an axiom with disjunction on the rhs */ 601 /** Non supported axioms */
543 private def shift( 602 private def notSupported(axiom: OWLLogicalAxiom): Seq[OWLLogicalAxiom] = {
544 sub: OWLClassExpression,
545 sup: OWLObjectUnionOf
546 ): Seq[OWLLogicalAxiom] = {
547 val body =
548 sub.asConjunctSet.map((atom) => (atom, RSAUtil.getFreshOWLClass()))
549 val head =
550 sup.asDisjunctSet.map((atom) => (atom, RSAUtil.getFreshOWLClass()))
551
552 /* Update statistics */
553 shifted += 1
554
555 val r1 =
556 factory.getOWLSubClassOfAxiom(
557 factory.getOWLObjectIntersectionOf(
558 (body.map(_._1) ++ head.map(_._2)): _*
559 ),
560 factory.getOWLNothing
561 )
562
563 val r2s =
564 for {
565 (a, na) <- head
566 hs = head.map(_._2).filterNot(_ equals na)
567 } yield factory.getOWLSubClassOfAxiom(
568 factory.getOWLObjectIntersectionOf(
569 (body.map(_._1) ++ hs): _*
570 ),
571 a
572 )
573
574 val r3s =
575 for {
576 (a, na) <- body
577 bs = body.map(_._1).filterNot(_ equals a)
578 } yield factory.getOWLSubClassOfAxiom(
579 factory.getOWLObjectIntersectionOf(
580 (bs ++ head.map(_._2)): _*
581 ),
582 na
583 )
584
585 Seq(r1) ++ r2s ++ r3s
586 }
587
588 /** Approximation function for axioms out of Horn-ALCHOIQ
589 *
590 * By default discards the axiom, which guarantees a lower bound
591 * ontology w.r.t. CQ answering.
592 */
593 protected def notInHornALCHOIQ(
594 axiom: OWLLogicalAxiom
595 ): Seq[OWLLogicalAxiom] = {
596 /* Update statistics */
597 discarded += 1
598 Logger.print( 603 Logger.print(
599 s"'$axiom' has been ignored because it is not in Horn-ALCHOIQ", 604 s"'$axiom' has been ignored because it is not in Horn-ALCHOIQ",
600 Logger.VERBOSE 605 Logger.VERBOSE
601 ) 606 )
602 Seq() 607 Seq()
608 // throw new RuntimeException(
609 // s"'$axiom' is not currently supported."
610 // )
603 } 611 }
604
605 /** Non supported axioms */
606 private def notSupported(axiom: OWLLogicalAxiom): Seq[OWLLogicalAxiom] =
607 throw new RuntimeException(
608 s"'$axiom' is not currently supported."
609 )
610} 612}
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala
index 3aa3c5f..2f48798 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala
@@ -24,13 +24,13 @@ import tech.oxfordsemantic.jrdfox.logic.datalog.{
24 BindAtom, 24 BindAtom,
25 BodyFormula, 25 BodyFormula,
26 Rule, 26 Rule,
27 TupleTableAtom 27 TupleTableAtom,
28 TupleTableName
28} 29}
29import tech.oxfordsemantic.jrdfox.logic.expression.{Term, IRI, FunctionCall} 30import tech.oxfordsemantic.jrdfox.logic.expression.{Term, IRI, FunctionCall}
30import uk.ac.ox.cs.rsacomb.RSAUtil
31import uk.ac.ox.cs.rsacomb.RSAOntology 31import uk.ac.ox.cs.rsacomb.RSAOntology
32import uk.ac.ox.cs.rsacomb.suffix.{Empty, Inverse, RSASuffix} 32import uk.ac.ox.cs.rsacomb.suffix.{Empty, Inverse, RSASuffix}
33import uk.ac.ox.cs.rsacomb.util.{RSA, RDFoxUtil} 33import uk.ac.ox.cs.rsacomb.util.{DataFactory, RSA, RDFoxUtil}
34 34
35/** Horn-ALCHOIQ to RDFox axiom converter. 35/** Horn-ALCHOIQ to RDFox axiom converter.
36 * 36 *
@@ -60,6 +60,10 @@ trait RDFoxConverter {
60 private val manager = OWLManager.createOWLOntologyManager() 60 private val manager = OWLManager.createOWLOntologyManager()
61 private val factory = manager.getOWLDataFactory() 61 private val factory = manager.getOWLDataFactory()
62 62
63 /** Default named graph to be used when generating new atoms */
64 val graph: TupleTableName =
65 TupleTableName.create("http://oxfordsemantic.tech/RDFox#DefaultTriples")
66
63 /** Represents the result of the conversion of a 67 /** Represents the result of the conversion of a
64 * [[org.semanticweb.owlapi.model.OWLClassExpression OWLClassExpression]]. 68 * [[org.semanticweb.owlapi.model.OWLClassExpression OWLClassExpression]].
65 * 69 *
@@ -91,35 +95,37 @@ trait RDFoxConverter {
91 protected def ResultF(atoms: List[TupleTableAtom]): Result = (atoms, List()) 95 protected def ResultF(atoms: List[TupleTableAtom]): Result = (atoms, List())
92 protected def ResultR(rules: List[Rule]): Result = (List(), rules) 96 protected def ResultR(rules: List[Rule]): Result = (List(), rules)
93 97
94 /** Converts a 98 /** Converts a [[OWLLogicalAxiom]] into a collection of [[TupleTableAtoms]] and [[Rules]].
95 * [[org.semanticweb.owlapi.model.OWLLogicalAxiom OWLLogicalAxiom]]
96 * into a collection of
97 * [[tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableAtom TupleTableAtoms]]
98 * and
99 * [[tech.oxfordsemantic.jrdfox.logic.datalog.Rule Rules]].
100 * 99 *
101 * @note not all possible axioms are handled correctly, and in 100 * @note not all possible axioms are handled correctly, and in
102 * general they are assumed to be normalised. Following is a list of 101 * general they are assumed to be normalised. Following is a list of
103 * all unhandled class expressions: 102 * all unhandled class expressions:
104 * - [[org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom OWLAsymmetricObjectPropertyAxiom]] 103 * - [[org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom OWLAsymmetricObjectPropertyAxiom]]
105 * - [[org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom OWLDataPropertyAssertionAxiom]]
106 * - [[org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom OWLDataPropertyRangeAxiom]]
107 * - [[org.semanticweb.owlapi.model.OWLDatatypeDefinitionAxiom OWLDatatypeDefinitionAxiom]] 104 * - [[org.semanticweb.owlapi.model.OWLDatatypeDefinitionAxiom OWLDatatypeDefinitionAxiom]]
108 * - [[org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom OWLDifferentIndividualsAxiom]]
109 * - [[org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom OWLDisjointDataPropertiesAxiom]] 105 * - [[org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom OWLDisjointDataPropertiesAxiom]]
110 * - [[org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom OWLDisjointObjectPropertiesAxiom]] 106 * - [[org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom OWLDisjointObjectPropertiesAxiom]]
111 * - [[org.semanticweb.owlapi.model.OWLDisjointUnionAxiom OWLDisjointUnionAxiom]]
112 * - [[org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom OWLEquivalentDataPropertiesAxiom]]
113 * - [[org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom OWLFunctionalDataPropertyAxiom]]
114 * - [[org.semanticweb.owlapi.model.OWLHasKeyAxiom OWLHasKeyAxiom]] 107 * - [[org.semanticweb.owlapi.model.OWLHasKeyAxiom OWLHasKeyAxiom]]
115 * - [[org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom OWLIrreflexiveObjectPropertyAxiom]] 108 * - [[org.semanticweb.owlapi.model.SWRLRule SWRLRule]]
116 * - [[org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom OWLNegativeDataPropertyAssertionAxiom]] 109 *
117 * - [[org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom OWLNegativeObjectPropertyAssertionAxiom]] 110 * @note The following axioms are not handled directly but can be
111 * normalised beforehand.
112 *
113 * - [[org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom OWLTransitiveObjectPropertyAxiom]]
114 * - [[org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom OWLDataPropertyAssertionAxiom]]
115 * - [[org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom OWLDataPropertyRangeAxiom]]
116 * - [[org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom OWLDifferentIndividualsAxiom]]
118 * - [[org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom OWLReflexiveObjectPropertyAxiom]] 117 * - [[org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom OWLReflexiveObjectPropertyAxiom]]
119 * - [[org.semanticweb.owlapi.model.OWLSameIndividualAxiom OWLSameIndividualAxiom]] 118 * - [[org.semanticweb.owlapi.model.OWLSameIndividualAxiom OWLSameIndividualAxiom]]
120 * - [[org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom OWLSubPropertyChainOfAxiom]] 119 * - [[org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom OWLNegativeDataPropertyAssertionAxiom]]
121 * - [[org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom OWLTransitiveObjectPropertyAxiom]] 120 * - [[org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom OWLNegativeObjectPropertyAssertionAxiom]]
122 * - [[org.semanticweb.owlapi.model.SWRLRule SWRLRule]] 121 * - [[org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom OWLIrreflexiveObjectPropertyAxiom]]
122 * - [[org.semanticweb.owlapi.model.OWLDisjointUnionAxiom OWLDisjointUnionAxiom]]
123 * - [[org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom OWLEquivalentDataPropertiesAxiom]]
124 * - [[org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom OWLFunctionalDataPropertyAxiom]]
125 *
126 * @see [[Normaliser]]
127 * @see
128 * http://owlcs.github.io/owlapi/apidocs_5/index.html
123 */ 129 */
124 def convert( 130 def convert(
125 axiom: OWLLogicalAxiom, 131 axiom: OWLLogicalAxiom,
@@ -127,16 +133,16 @@ trait RDFoxConverter {
127 unsafe: List[OWLObjectPropertyExpression], 133 unsafe: List[OWLObjectPropertyExpression],
128 skolem: SkolemStrategy, 134 skolem: SkolemStrategy,
129 suffix: RSASuffix 135 suffix: RSASuffix
130 ): Result = 136 )(implicit fresh: DataFactory): Result =
131 axiom match { 137 axiom match {
132 138
133 case a: OWLSubClassOfAxiom => { 139 case a: OWLSubClassOfAxiom => {
134 val subcls = a.getSubClass 140 val subcls = a.getSubClass
135 val supcls = a.getSuperClass 141 val supcls = a.getSuperClass
136 val (sub, _) = 142 val (sub, _) =
137 convert(subcls, term, unsafe, NoSkolem, suffix) 143 convert(subcls, term, unsafe, NoSkolem, suffix)(fresh)
138 val (sup, ext) = 144 val (sup, ext) =
139 convert(supcls, term, unsafe, skolem, suffix) 145 convert(supcls, term, unsafe, skolem, suffix)(fresh)
140 val rule = Rule.create(sup, ext ::: sub) 146 val rule = Rule.create(sup, ext ::: sub)
141 ResultR(List(rule)) 147 ResultR(List(rule))
142 } 148 }
@@ -146,7 +152,7 @@ trait RDFoxConverter {
146 case a: OWLEquivalentClassesAxiom => { 152 case a: OWLEquivalentClassesAxiom => {
147 val (atoms, rules) = a.asPairwiseAxioms 153 val (atoms, rules) = a.asPairwiseAxioms
148 .flatMap(_.asOWLSubClassOfAxioms) 154 .flatMap(_.asOWLSubClassOfAxioms)
149 .map(a => convert(a, term, unsafe, skolem dup a, suffix)) 155 .map(a => convert(a, term, unsafe, skolem dup a, suffix)(fresh))
150 .unzip 156 .unzip
151 (atoms.flatten, rules.flatten) 157 (atoms.flatten, rules.flatten)
152 } 158 }
@@ -154,61 +160,65 @@ trait RDFoxConverter {
154 case a: OWLEquivalentObjectPropertiesAxiom => { 160 case a: OWLEquivalentObjectPropertiesAxiom => {
155 val (atoms, rules) = a.asPairwiseAxioms 161 val (atoms, rules) = a.asPairwiseAxioms
156 .flatMap(_.asSubObjectPropertyOfAxioms) 162 .flatMap(_.asSubObjectPropertyOfAxioms)
157 .map(a => convert(a, term, unsafe, skolem dup a, suffix)) 163 .map(a => convert(a, term, unsafe, skolem dup a, suffix)(fresh))
158 .unzip 164 .unzip
159 (atoms.flatten, rules.flatten) 165 (atoms.flatten, rules.flatten)
160 } 166 }
161 167
162 case a: OWLSubObjectPropertyOfAxiom => { 168 case a: OWLSubObjectPropertyOfAxiom => {
163 val term1 = RSAUtil.genFreshVariable() 169 val term1 = fresh.getVariable
164 val body = convert(a.getSubProperty, term, term1, suffix) 170 val body = convert(a.getSubProperty, term, term1, suffix)(fresh)
165 val head = convert(a.getSuperProperty, term, term1, suffix) 171 val head = convert(a.getSuperProperty, term, term1, suffix)(fresh)
166 ResultR(List(Rule.create(head, body))) 172 ResultR(List(Rule.create(head, body)))
167 } 173 }
168 174
169 case a: OWLSubDataPropertyOfAxiom => { 175 case a: OWLSubDataPropertyOfAxiom => {
170 val term1 = RSAUtil.genFreshVariable() 176 val term1 = fresh.getVariable
171 val body = convert(a.getSubProperty, term, term1, suffix) 177 val body = convert(a.getSubProperty, term, term1, suffix)(fresh)
172 val head = convert(a.getSuperProperty, term, term1, suffix) 178 val head = convert(a.getSuperProperty, term, term1, suffix)(fresh)
173 ResultR(List(Rule.create(head, body))) 179 ResultR(List(Rule.create(head, body)))
174 } 180 }
175 181
176 case a: OWLObjectPropertyDomainAxiom => 182 case a: OWLObjectPropertyDomainAxiom =>
177 convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix) 183 convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix)(fresh)
178 184
179 case a: OWLObjectPropertyRangeAxiom => { 185 case a: OWLObjectPropertyRangeAxiom => {
180 val term1 = RSAUtil.genFreshVariable() 186 val term1 = fresh.getVariable
181 val (res, ext) = convert(a.getRange, term, unsafe, skolem, suffix) 187 val (res, ext) =
182 val prop = convert(a.getProperty, term1, term, suffix) 188 convert(a.getRange, term, unsafe, skolem, suffix)(fresh)
189 val prop = convert(a.getProperty, term1, term, suffix)(fresh)
183 ResultR(List(Rule.create(res, prop :: ext))) 190 ResultR(List(Rule.create(res, prop :: ext)))
184 } 191 }
185 192
186 case a: OWLDataPropertyDomainAxiom => 193 case a: OWLDataPropertyDomainAxiom =>
187 convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix) 194 convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix)(fresh)
188 195
189 case a: OWLDisjointClassesAxiom => { 196 case a: OWLDisjointClassesAxiom => {
190 val body = a.getOperandsAsList.asScala.toSeq 197 val body = a.getOperandsAsList.asScala.toSeq
191 .flatMap((cls) => convert(cls, term, unsafe, NoSkolem, suffix)._1) 198 .flatMap((cls) =>
192 val bottom = TupleTableAtom.rdf(term, IRI.RDF_TYPE, IRI.NOTHING) 199 convert(cls, term, unsafe, NoSkolem, suffix)(fresh)._1
200 )
201 val bottom =
202 TupleTableAtom.create(graph, term, IRI.RDF_TYPE, IRI.NOTHING)
193 ResultR(List(Rule.create(bottom, body: _*))) 203 ResultR(List(Rule.create(bottom, body: _*)))
194 } 204 }
195 205
196 case a: OWLInverseObjectPropertiesAxiom => { 206 case a: OWLInverseObjectPropertiesAxiom => {
197 val (atoms, rules) = a.asSubObjectPropertyOfAxioms 207 val (atoms, rules) = a.asSubObjectPropertyOfAxioms
198 .map(a => convert(a, term, unsafe, skolem dup a, suffix)) 208 .map(a => convert(a, term, unsafe, skolem dup a, suffix)(fresh))
199 .unzip 209 .unzip
200 (atoms.flatten, rules.flatten) 210 (atoms.flatten, rules.flatten)
201 } 211 }
202 212
203 case a: OWLFunctionalObjectPropertyAxiom => 213 case a: OWLFunctionalObjectPropertyAxiom =>
204 convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix) 214 convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix)(fresh)
205 215
206 case a: OWLInverseFunctionalObjectPropertyAxiom => 216 case a: OWLInverseFunctionalObjectPropertyAxiom =>
207 convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix) 217 convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix)(fresh)
208 218
209 case a: OWLSymmetricObjectPropertyAxiom => { 219 case a: OWLSymmetricObjectPropertyAxiom => {
210 val (atoms, rules) = a.asSubPropertyAxioms 220 val (atoms, rules) = a.asSubPropertyAxioms
211 .map(a => convert(a, term, unsafe, skolem dup a, suffix)) 221 .map(a => convert(a, term, unsafe, skolem dup a, suffix)(fresh))
212 .unzip 222 .unzip
213 (atoms.flatten, rules.flatten) 223 (atoms.flatten, rules.flatten)
214 } 224 }
@@ -219,7 +229,7 @@ trait RDFoxConverter {
219 case i: OWLNamedIndividual => { 229 case i: OWLNamedIndividual => {
220 val cls = a.getClassExpression 230 val cls = a.getClassExpression
221 val (res, _) = 231 val (res, _) =
222 convert(cls, i.getIRI, unsafe, NoSkolem, suffix) 232 convert(cls, i.getIRI, unsafe, NoSkolem, suffix)(fresh)
223 ResultF(res) 233 ResultF(res)
224 } 234 }
225 case _ => Result() 235 case _ => Result()
@@ -232,7 +242,7 @@ trait RDFoxConverter {
232 else { 242 else {
233 val subj = a.getSubject.asOWLNamedIndividual.getIRI 243 val subj = a.getSubject.asOWLNamedIndividual.getIRI
234 val obj = a.getObject.asOWLNamedIndividual.getIRI 244 val obj = a.getObject.asOWLNamedIndividual.getIRI
235 val prop = convert(a.getProperty, subj, obj, suffix) 245 val prop = convert(a.getProperty, subj, obj, suffix)(fresh)
236 ResultF(List(prop)) 246 ResultF(List(prop))
237 } 247 }
238 248
@@ -246,24 +256,30 @@ trait RDFoxConverter {
246 else { 256 else {
247 val subj = a.getSubject.asOWLNamedIndividual.getIRI 257 val subj = a.getSubject.asOWLNamedIndividual.getIRI
248 val obj = a.getObject 258 val obj = a.getObject
249 val prop = convert(a.getProperty, subj, obj, suffix) 259 val prop = convert(a.getProperty, subj, obj, suffix)(fresh)
250 ResultF(List(prop)) 260 ResultF(List(prop))
251 } 261 }
252 262
253 case a: OWLDataPropertyRangeAxiom => 263 case a: OWLSubPropertyChainOfAxiom => {
254 Result() // ignored 264 val (term1, body) =
255 265 a.getPropertyChain.foldLeft((term, List[TupleTableAtom]())) {
256 case a: OWLFunctionalDataPropertyAxiom => 266 case ((term, atoms), prop) => {
257 Result() 267 val term1 = fresh.getVariable
258 268 val atom = convert(prop, term, term1, suffix)(fresh)
259 case a: OWLTransitiveObjectPropertyAxiom => 269 (term1, atoms :+ atom)
260 Result() 270 }
271 }
272 val head = convert(a.getSuperProperty, term, term1, suffix)(fresh)
273 val rule = Rule.create(head, body)
274 println(rule)
275 ResultR(List(rule))
276 }
261 277
262 /** Catch-all case for all unhandled axiom types. */ 278 /** Catch-all case for all unhandled axiom types. */
263 case a => default(axiom) 279 case a => unsupported(axiom)
264 } 280 }
265 281
266 protected def default(axiom: OWLLogicalAxiom): Result = 282 protected def unsupported(axiom: OWLLogicalAxiom): Result =
267 throw new RuntimeException(s"Axiom '$axiom' is not supported (yet?)") 283 throw new RuntimeException(s"Axiom '$axiom' is not supported (yet?)")
268 284
269 /** Converts a class expression into a collection of atoms. 285 /** Converts a class expression into a collection of atoms.
@@ -291,7 +307,7 @@ trait RDFoxConverter {
291 unsafe: List[OWLObjectPropertyExpression], 307 unsafe: List[OWLObjectPropertyExpression],
292 skolem: SkolemStrategy, 308 skolem: SkolemStrategy,
293 suffix: RSASuffix 309 suffix: RSASuffix
294 ): Shards = 310 )(implicit fresh: DataFactory): Shards =
295 expr match { 311 expr match {
296 312
297 /** Simple class name. 313 /** Simple class name.
@@ -300,7 +316,7 @@ trait RDFoxConverter {
300 */ 316 */
301 case e: OWLClass => { 317 case e: OWLClass => {
302 val iri: IRI = if (e.isTopEntity()) IRI.THING else e.getIRI 318 val iri: IRI = if (e.isTopEntity()) IRI.THING else e.getIRI
303 val atom = TupleTableAtom.rdf(term, IRI.RDF_TYPE, iri) 319 val atom = TupleTableAtom.create(graph, term, IRI.RDF_TYPE, iri)
304 (List(atom), List()) 320 (List(atom), List())
305 } 321 }
306 322
@@ -310,7 +326,7 @@ trait RDFoxConverter {
310 */ 326 */
311 case e: OWLObjectIntersectionOf => { 327 case e: OWLObjectIntersectionOf => {
312 val (res, ext) = e.asConjunctSet 328 val (res, ext) = e.asConjunctSet
313 .map(convert(_, term, unsafe, skolem, suffix)) 329 .map(convert(_, term, unsafe, skolem, suffix)(fresh))
314 .unzip 330 .unzip
315 (res.flatten, ext.flatten) 331 (res.flatten, ext.flatten)
316 } 332 }
@@ -330,7 +346,8 @@ trait RDFoxConverter {
330 .collect { case x: OWLNamedIndividual => x } 346 .collect { case x: OWLNamedIndividual => x }
331 if (named.length != 1) 347 if (named.length != 1)
332 throw new RuntimeException(s"Class expression '$e' has arity != 1.") 348 throw new RuntimeException(s"Class expression '$e' has arity != 1.")
333 val atom = TupleTableAtom.rdf(term, IRI.SAME_AS, named.head.getIRI) 349 val atom =
350 TupleTableAtom.create(graph, term, RSA.CONGRUENT, named.head.getIRI)
334 (List(atom), List()) 351 (List(atom), List())
335 } 352 }
336 353
@@ -344,14 +361,14 @@ trait RDFoxConverter {
344 case e: OWLObjectSomeValuesFrom => { 361 case e: OWLObjectSomeValuesFrom => {
345 val cls = e.getFiller() 362 val cls = e.getFiller()
346 val role = e.getProperty() 363 val role = e.getProperty()
347 val varX = RSAUtil.genFreshVariable 364 val varX = fresh.getVariable
348 val (bind, term1) = skolem match { 365 val (bind, term1) = skolem match {
349 case NoSkolem => (None, varX) 366 case NoSkolem => (None, varX)
350 case c: Constant => (None, c.iri) 367 case c: Constant => (None, c.iri)
351 case s: Standard => (Some(RDFoxUtil.skolem(s.name, term, varX)), varX) 368 case s: Standard => (Some(RDFoxUtil.skolem(s.name, term, varX)), varX)
352 } 369 }
353 val (res, ext) = convert(cls, term1, unsafe, skolem, suffix) 370 val (res, ext) = convert(cls, term1, unsafe, skolem, suffix)(fresh)
354 val prop = convert(role, term, term1, suffix) 371 val prop = convert(role, term, term1, suffix)(fresh)
355 (prop :: res, ext ++ bind) 372 (prop :: res, ext ++ bind)
356 } 373 }
357 374
@@ -371,13 +388,13 @@ trait RDFoxConverter {
371 // Computes the result of rule skolemization. Depending on the used 388 // Computes the result of rule skolemization. Depending on the used
372 // technique it might involve the introduction of additional atoms, 389 // technique it might involve the introduction of additional atoms,
373 // and/or fresh constants and variables. 390 // and/or fresh constants and variables.
374 val varX = RSAUtil.genFreshVariable 391 val varX = fresh.getVariable
375 val (bind, term1) = skolem match { 392 val (bind, term1) = skolem match {
376 case NoSkolem => (None, varX) 393 case NoSkolem => (None, varX)
377 case c: Constant => (None, c.iri) 394 case c: Constant => (None, c.iri)
378 case s: Standard => (Some(RDFoxUtil.skolem(s.name, term, varX)), varX) 395 case s: Standard => (Some(RDFoxUtil.skolem(s.name, term, varX)), varX)
379 } 396 }
380 val prop = convert(role, term, term1, suffix) 397 val prop = convert(role, term, term1, suffix)(fresh)
381 (List(prop), bind.toList) 398 (List(prop), bind.toList)
382 } 399 }
383 400
@@ -396,12 +413,13 @@ trait RDFoxConverter {
396 s"Class expression '$e' has cardinality restriction != 1." 413 s"Class expression '$e' has cardinality restriction != 1."
397 ) 414 )
398 val vars @ (y :: z :: _) = 415 val vars @ (y :: z :: _) =
399 Seq(RSAUtil.genFreshVariable(), RSAUtil.genFreshVariable()) 416 Seq(fresh.getVariable, fresh.getVariable)
400 val cls = e.getFiller 417 val cls = e.getFiller
401 val role = e.getProperty 418 val role = e.getProperty
402 val (res, ext) = vars.map(convert(cls, _, unsafe, skolem, suffix)).unzip 419 val (res, ext) =
403 val props = vars.map(convert(role, term, _, suffix)) 420 vars.map(convert(cls, _, unsafe, skolem, suffix)(fresh)).unzip
404 val eq = TupleTableAtom.rdf(y, IRI.SAME_AS, z) 421 val props = vars.map(convert(role, term, _, suffix)(fresh))
422 val eq = TupleTableAtom.create(graph, y, RSA.CONGRUENT, z)
405 (List(eq), res.flatten ++ props) 423 (List(eq), res.flatten ++ props)
406 } 424 }
407 425
@@ -423,7 +441,7 @@ trait RDFoxConverter {
423 val filler = e.getFiller 441 val filler = e.getFiller
424 val property = e.getProperty 442 val property = e.getProperty
425 val expr = factory.getOWLObjectSomeValuesFrom(property, filler) 443 val expr = factory.getOWLObjectSomeValuesFrom(property, filler)
426 convert(expr, term, unsafe, skolem, suffix) 444 convert(expr, term, unsafe, skolem, suffix)(fresh)
427 } 445 }
428 446
429 /** Minimum cardinality restriction class 447 /** Minimum cardinality restriction class
@@ -444,7 +462,7 @@ trait RDFoxConverter {
444 val filler = e.getFiller 462 val filler = e.getFiller
445 val property = e.getProperty 463 val property = e.getProperty
446 val expr = factory.getOWLDataSomeValuesFrom(property, filler) 464 val expr = factory.getOWLDataSomeValuesFrom(property, filler)
447 convert(expr, term, unsafe, skolem, suffix) 465 convert(expr, term, unsafe, skolem, suffix)(fresh)
448 } 466 }
449 467
450 //case (_, sup: OWLObjectExactCardinality) => { 468 //case (_, sup: OWLObjectExactCardinality) => {
@@ -467,7 +485,7 @@ trait RDFoxConverter {
467 case i: OWLNamedIndividual => i.getIRI 485 case i: OWLNamedIndividual => i.getIRI
468 case i: OWLAnonymousIndividual => i.getID 486 case i: OWLAnonymousIndividual => i.getID
469 } 487 }
470 (List(convert(e.getProperty, term, term1, suffix)), List()) 488 (List(convert(e.getProperty, term, term1, suffix)(fresh)), List())
471 } 489 }
472 490
473 /** Existential quantification with singleton literal filler 491 /** Existential quantification with singleton literal filler
@@ -476,7 +494,7 @@ trait RDFoxConverter {
476 * [[http://www.w3.org/TR/owl2-syntax/#Literal_Value_Restriction]] 494 * [[http://www.w3.org/TR/owl2-syntax/#Literal_Value_Restriction]]
477 */ 495 */
478 case e: OWLDataHasValue => 496 case e: OWLDataHasValue =>
479 (List(convert(e.getProperty, term, e.getFiller, suffix)), List()) 497 (List(convert(e.getProperty, term, e.getFiller, suffix)(fresh)), List())
480 498
481 case e: OWLObjectUnionOf => { 499 case e: OWLObjectUnionOf => {
482 (List(), List()) 500 (List(), List())
@@ -495,7 +513,7 @@ trait RDFoxConverter {
495 term1: Term, 513 term1: Term,
496 term2: Term, 514 term2: Term,
497 suffix: RSASuffix 515 suffix: RSASuffix
498 ): TupleTableAtom = 516 )(implicit fresh: DataFactory): TupleTableAtom =
499 expr match { 517 expr match {
500 518
501 /** Simple named role/object property. 519 /** Simple named role/object property.
@@ -504,7 +522,7 @@ trait RDFoxConverter {
504 */ 522 */
505 case e: OWLObjectProperty => { 523 case e: OWLObjectProperty => {
506 val role = IRI.create(e.getIRI.getIRIString :: suffix) 524 val role = IRI.create(e.getIRI.getIRIString :: suffix)
507 TupleTableAtom.rdf(term1, role, term2) 525 TupleTableAtom.create(graph, term1, role, term2)
508 } 526 }
509 527
510 /** Inverse of a named role/property 528 /** Inverse of a named role/property
@@ -516,7 +534,7 @@ trait RDFoxConverter {
516 */ 534 */
517 case e: OWLObjectInverseOf => 535 case e: OWLObjectInverseOf =>
518 //convert(e.getInverse, term1, term2, suffix + Inverse) 536 //convert(e.getInverse, term1, term2, suffix + Inverse)
519 convert(e.getInverse, term2, term1, suffix) 537 convert(e.getInverse, term2, term1, suffix)(fresh)
520 538
521 /** The infamous impossible case. 539 /** The infamous impossible case.
522 * 540 *
@@ -535,7 +553,7 @@ trait RDFoxConverter {
535 term1: Term, 553 term1: Term,
536 term2: Term, 554 term2: Term,
537 suffix: RSASuffix 555 suffix: RSASuffix
538 ): TupleTableAtom = 556 )(implicit fresh: DataFactory): TupleTableAtom =
539 expr match { 557 expr match {
540 558
541 /** Simple named role/data property 559 /** Simple named role/data property
@@ -544,7 +562,7 @@ trait RDFoxConverter {
544 */ 562 */
545 case e: OWLDataProperty => { 563 case e: OWLDataProperty => {
546 val role = IRI.create(e.getIRI.getIRIString :: suffix) 564 val role = IRI.create(e.getIRI.getIRIString :: suffix)
547 TupleTableAtom.rdf(term1, role, term2) 565 TupleTableAtom.create(graph, term1, role, term2)
548 } 566 }
549 567
550 /** The infamous impossible case. 568 /** The infamous impossible case.
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala
index 2774cb1..075954e 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala
@@ -17,25 +17,39 @@
17package uk.ac.ox.cs.rsacomb.filtering 17package uk.ac.ox.cs.rsacomb.filtering
18 18
19import tech.oxfordsemantic.jrdfox.logic.datalog.Rule 19import tech.oxfordsemantic.jrdfox.logic.datalog.Rule
20import tech.oxfordsemantic.jrdfox.logic.expression.IRI
20import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery 21import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery
21import uk.ac.ox.cs.rsacomb.util.Versioned 22import uk.ac.ox.cs.rsacomb.util.Versioned
22 23
24/** Type of filtering strategy.
25 *
26 * Mainly for testing different approaches and techniques.
27 */
23sealed trait FilterType 28sealed trait FilterType
24object FilterType { 29object FilterType {
25 case object NAIVE extends FilterType 30 case object NAIVE extends FilterType
26 case object REVISED extends FilterType 31 case object REVISED extends FilterType
27} 32}
28 33
34/** Filtering program trait */
29object FilteringProgram extends Versioned[FilterType] { 35object FilteringProgram extends Versioned[FilterType] {
30 36
31 import FilterType._ 37 import FilterType._
32 38
33 type Result = (ConjunctiveQuery) => FilteringProgram 39 type Result = (IRI, IRI, ConjunctiveQuery) => FilteringProgram
34 40
35 def apply(t: FilterType): (ConjunctiveQuery) => FilteringProgram = 41 /** Returns the right type of filtering program builder.
36 t match { 42 *
37 case NAIVE => NaiveFilteringProgram(_) 43 * @param filter type of filtering program.
38 case REVISED => RevisedFilteringProgram(_) 44 * @param source source named graph for the filtering program.
45 * @param target target named graph for the filtering program.
46 *
47 * @return the right type of filtering program builder.
48 */
49 def apply(filter: FilterType): Result =
50 filter match {
51 case NAIVE => NaiveFilteringProgram(_, _, _)
52 case REVISED => RevisedFilteringProgram(_, _, _)
39 } 53 }
40} 54}
41 55
@@ -46,6 +60,12 @@ object FilteringProgram extends Versioned[FilterType] {
46 */ 60 */
47trait FilteringProgram { 61trait FilteringProgram {
48 62
63 /** Source named graph for the filtering process */
64 val source: IRI
65
66 /** Target named graph for the filtering process */
67 val target: IRI
68
49 /** Query from which the filtering program is generated */ 69 /** Query from which the filtering program is generated */
50 val query: ConjunctiveQuery 70 val query: ConjunctiveQuery
51 71
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala
index 45dd867..6174c9d 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala
@@ -21,23 +21,35 @@ import tech.oxfordsemantic.jrdfox.logic.Datatype
21import tech.oxfordsemantic.jrdfox.logic.datalog.{ 21import tech.oxfordsemantic.jrdfox.logic.datalog.{
22 Rule, 22 Rule,
23 TupleTableAtom, 23 TupleTableAtom,
24 TupleTableName,
24 BodyFormula, 25 BodyFormula,
25 Negation 26 Negation
26} 27}
27import tech.oxfordsemantic.jrdfox.logic.expression.{Term, Variable} 28import tech.oxfordsemantic.jrdfox.logic.expression.{
29 IRI,
30 Literal,
31 Term,
32 Variable
33}
28import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery 34import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery
29import uk.ac.ox.cs.rsacomb.suffix.{Forward, Backward} 35import uk.ac.ox.cs.rsacomb.suffix.{RSASuffix, Forward, Backward, Nth}
30import uk.ac.ox.cs.rsacomb.util.{RSA, RDFoxUtil} 36import uk.ac.ox.cs.rsacomb.util.{DataFactory, RSA, RDFoxUtil}
31 37
32/** Factory for [[uk.ac.ox.cs.rsacomb.FilteringProgram FilteringProgram]] */ 38/** Factory for [[uk.ac.ox.cs.rsacomb.FilteringProgram FilteringProgram]] */
33object NaiveFilteringProgram { 39object NaiveFilteringProgram {
34 40
35 /** Create a new FilteringProgram instance. 41 /** Create a new FilteringProgram instance.
36 * 42 *
43 * @param source source named graph for the filtering program.
44 * @param target target named graph for the filtering program.
37 * @param query CQ to be converted into logic rules. 45 * @param query CQ to be converted into logic rules.
38 */ 46 */
39 def apply(query: ConjunctiveQuery): FilteringProgram = 47 def apply(
40 new NaiveFilteringProgram(query) 48 source: IRI,
49 target: IRI,
50 query: ConjunctiveQuery
51 ): FilteringProgram =
52 new NaiveFilteringProgram(source, target, query)
41} 53}
42 54
43/** Filtering Program generator 55/** Filtering Program generator
@@ -47,14 +59,23 @@ object NaiveFilteringProgram {
47 * 59 *
48 * Instances can be created using the companion object. 60 * Instances can be created using the companion object.
49 */ 61 */
50class NaiveFilteringProgram(val query: ConjunctiveQuery) 62class NaiveFilteringProgram(
51 extends FilteringProgram { 63 val source: IRI,
64 val target: IRI,
65 val query: ConjunctiveQuery
66) extends FilteringProgram {
52 67
53 /** Extends capabilities of 68 /** Extends capabilities of
54 * [[tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableAtom TupleTableAtom]] 69 * [[tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableAtom TupleTableAtom]]
55 */ 70 */
56 import uk.ac.ox.cs.rsacomb.implicits.RSAAtom._ 71 import uk.ac.ox.cs.rsacomb.implicits.RSAAtom._
57 72
73 /** Simplify conversion to RDFox specific types */
74 import uk.ac.ox.cs.rsacomb.implicits.RDFox._
75
76 /** Simplify conversion between Java and Scala `List`s */
77 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._
78
58 /** Implicit parameter used in RSA internal predicates. 79 /** Implicit parameter used in RSA internal predicates.
59 * 80 *
60 * @see [[uk.ac.ox.cs.rsacomb.util.RSA]] for more information. 81 * @see [[uk.ac.ox.cs.rsacomb.util.RSA]] for more information.
@@ -72,6 +93,44 @@ class NaiveFilteringProgram(val query: ConjunctiveQuery)
72 private val varU = Variable.create("U") 93 private val varU = Variable.create("U")
73 private val varW = Variable.create("W") 94 private val varW = Variable.create("W")
74 95
 96 /** `TupleTableName`s for the source/target named graphs */
97 val tts: TupleTableName = TupleTableName.create(source.getIRI)
98 val ttt: TupleTableName = TupleTableName.create(target.getIRI)
99
100 /** Set of atoms in the body of the query */
101 private val queryBody: List[TupleTableAtom] = query.atoms(tts)
102
103 /** Helpers */
104 private def not(atom: TupleTableAtom): BodyFormula = Negation.create(atom)
105
106 private val QM: TupleTableAtom =
107 TupleTableAtom.create(ttt, RSA.QM :: query.answer ::: query.bounded)
108 private def ID(t1: Term, t2: Term) =
109 TupleTableAtom.create(
110 ttt,
111 RSA.ID +: (query.answer ::: query.bounded) :+ t1 :+ t2
112 )
113 private def NI(term: Term) =
114 TupleTableAtom.create(ttt, term, IRI.RDF_TYPE, RSA.NI)
115 private def TQ(sx: RSASuffix, t1: Term, t2: Term) =
116 TupleTableAtom.create(
117 ttt,
118 (RSA.TQ :: sx) +: (query.answer ::: query.bounded) :+ t1 :+ t2
119 )
120 private def AQ(sx: RSASuffix, t1: Term, t2: Term) =
121 TupleTableAtom.create(
122 ttt,
123 (RSA.AQ :: sx) +: (query.answer ::: query.bounded) :+ t1 :+ t2
124 )
125 private val FK: TupleTableAtom =
126 TupleTableAtom.create(ttt, RSA.FK :: query.answer ::: query.bounded)
127 private val SP: TupleTableAtom =
128 TupleTableAtom.create(ttt, RSA.SP :: query.answer ::: query.bounded)
129 private def Ans = if (query.bcq)
130 TupleTableAtom.create(ttt, RSA("blank"), IRI.RDF_TYPE, RSA.ANS)
131 else
132 TupleTableAtom.create(ttt, RSA.ANS :: query.answer)
133
75 /** Rule generating the instances of the predicate `rsa:NI`. 134 /** Rule generating the instances of the predicate `rsa:NI`.
76 * 135 *
77 * According to the original paper, the set of `rsa:NI` is defined as 136 * According to the original paper, the set of `rsa:NI` is defined as
@@ -88,20 +147,21 @@ class NaiveFilteringProgram(val query: ConjunctiveQuery)
88 * generate in the filtering program using a logic rule. 147 * generate in the filtering program using a logic rule.
89 */ 148 */
90 val nis: Rule = 149 val nis: Rule =
91 Rule.create(RSA.NI(varX), RSA.Congruent(varX, varY), RSA.Named(varY)) 150 Rule.create(
151 NI(varX),
152 RSA.Congruent(tts)(varX, varY),
153 RSA.Named(tts)(varY)
154 )
92 155
93 /** Collection of filtering program rules. */ 156 /** Collection of filtering program rules. */
94 val rules: List[Rule] = 157 val rules: List[Rule] =
95 nis :: { 158 nis :: {
96 159
97 /** Negates a [[tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableAtom TupleTableAtom]] */
98 def not(atom: TupleTableAtom): BodyFormula = Negation.create(atom)
99
100 /** Generates all possible, unfiltered answers. 160 /** Generates all possible, unfiltered answers.
101 * 161 *
102 * @note corresponds to rule 1 in Table 3 in the paper. 162 * @note corresponds to rule 1 in Table 3 in the paper.
103 */ 163 */
104 val r1 = Rule.create(RSA.QM, query.atoms: _*) 164 val r1 = Rule.create(QM, queryBody: _*)
105 165
106 /** Initializes instances of `rsa:ID`. 166 /** Initializes instances of `rsa:ID`.
107 * 167 *
@@ -113,56 +173,77 @@ class NaiveFilteringProgram(val query: ConjunctiveQuery)
113 */ 173 */
114 val r3a = 174 val r3a =
115 for ((v, i) <- query.bounded.zipWithIndex) 175 for ((v, i) <- query.bounded.zipWithIndex)
116 yield Rule.create(RSA.ID(RSA(i), RSA(i)), RSA.QM, not(RSA.NI(v))) 176 yield Rule.create(ID(RSA(i), RSA(i)), QM, not(NI(v)))
117 val r3b = Rule.create(RSA.ID(varV, varU), RSA.ID(varU, varV)) 177 val r3b = Rule.create(ID(varV, varU), ID(varU, varV))
118 val r3c = 178 val r3c =
119 Rule.create(RSA.ID(varU, varW), RSA.ID(varU, varV), RSA.ID(varV, varW)) 179 Rule.create(ID(varU, varW), ID(varU, varV), ID(varV, varW))
120 180
121 /** Detects forks in the canonical model. 181 /** Detects forks in the canonical model.
122 * 182 *
123 * @note corresponds to rules 4x in Table 3. 183 * @note corresponds to rules 4x in Table 3.
124 */ 184 */
125 val r4a = for { 185 val r4a = for {
126 role1 <- query.atoms filter (_.isRoleAssertion) 186 role1 <- queryBody filter (_.isRoleAssertion)
127 index1 = query.bounded indexOf (role1.getArguments get 2) 187 index1 = query.bounded indexOf (role1.getArguments get 2)
128 if index1 >= 0 188 if index1 >= 0
129 role2 <- query.atoms filter (_.isRoleAssertion) 189 role2 <- queryBody filter (_.isRoleAssertion)
130 index2 = query.bounded indexOf (role2.getArguments get 2) 190 index2 = query.bounded indexOf (role2.getArguments get 2)
131 if index2 >= 0 191 if index2 >= 0
132 } yield Rule.create( 192 } yield Rule.create(
133 RSA.FK, 193 FK,
134 RSA.ID(RSA(index1), RSA(index2)), 194 ID(RSA(index1), RSA(index2)),
135 role1 << Forward, 195 role1 :: Forward,
136 role2 << Forward, 196 role2 :: Forward,
137 not(RSA.Congruent(role1.getArguments get 0, role2.getArguments get 0)) 197 not(
198 TupleTableAtom.create(
199 tts,
200 role1.getArguments get 0,
201 RSA.CONGRUENT,
202 role2.getArguments get 0
203 )
204 )
138 ) 205 )
139 val r4b = for { 206 val r4b = for {
140 role1 <- query.atoms filter (_.isRoleAssertion) 207 role1 <- queryBody filter (_.isRoleAssertion)
141 index1 = query.bounded indexOf (role1.getArguments get 2) 208 index1 = query.bounded indexOf (role1.getArguments get 2)
142 if index1 >= 0 209 if index1 >= 0
143 role2 <- query.atoms filter (_.isRoleAssertion) 210 role2 <- queryBody filter (_.isRoleAssertion)
144 index2 = query.bounded indexOf (role2.getArguments get 0) 211 index2 = query.bounded indexOf (role2.getArguments get 0)
145 if index2 >= 0 212 if index2 >= 0
146 } yield Rule.create( 213 } yield Rule.create(
147 RSA.FK, 214 FK,
148 RSA.ID(RSA(index1), RSA(index2)), 215 ID(RSA(index1), RSA(index2)),
149 role1 << Forward, 216 role1 :: Forward,
150 role2 << Backward, 217 role2 :: Backward,
151 not(RSA.Congruent(role1.getArguments get 0, role2.getArguments get 2)) 218 not(
219 TupleTableAtom.create(
220 tts,
221 role1.getArguments get 0,
222 RSA.CONGRUENT,
223 role2.getArguments get 2
224 )
225 )
152 ) 226 )
153 val r4c = for { 227 val r4c = for {
154 role1 <- query.atoms filter (_.isRoleAssertion) 228 role1 <- queryBody filter (_.isRoleAssertion)
155 index1 = query.bounded indexOf (role1.getArguments get 0) 229 index1 = query.bounded indexOf (role1.getArguments get 0)
156 if index1 >= 0 230 if index1 >= 0
157 role2 <- query.atoms filter (_.isRoleAssertion) 231 role2 <- queryBody filter (_.isRoleAssertion)
158 index2 = query.bounded indexOf (role2.getArguments get 0) 232 index2 = query.bounded indexOf (role2.getArguments get 0)
159 if index2 >= 0 233 if index2 >= 0
160 } yield Rule.create( 234 } yield Rule.create(
161 RSA.FK, 235 FK,
162 RSA.ID(RSA(index1), RSA(index2)), 236 ID(RSA(index1), RSA(index2)),
163 role1 << Backward, 237 role1 :: Backward,
164 role2 << Backward, 238 role2 :: Backward,
165 not(RSA.Congruent(role1.getArguments get 2, role2.getArguments get 2)) 239 not(
240 TupleTableAtom.create(
241 tts,
242 role1.getArguments get 2,
243 RSA.CONGRUENT,
244 role2.getArguments get 2
245 )
246 )
166 ) 247 )
167 248
168 /** Recursively propagates `rsa:ID` predicate. 249 /** Recursively propagates `rsa:ID` predicate.
@@ -170,79 +251,79 @@ class NaiveFilteringProgram(val query: ConjunctiveQuery)
170 * @note corresponds to rules 5x in Table 3. 251 * @note corresponds to rules 5x in Table 3.
171 */ 252 */
172 val r5a = for { 253 val r5a = for {
173 role1 <- query.atoms filter (_.isRoleAssertion) 254 role1 <- queryBody filter (_.isRoleAssertion)
174 r1arg0 = role1.getArguments get 0 255 r1arg0 = role1.getArguments get 0
175 if query.bounded contains r1arg0 256 if query.bounded contains r1arg0
176 r1arg2 = role1.getArguments get 2 257 r1arg2 = role1.getArguments get 2
177 if query.bounded contains r1arg2 258 if query.bounded contains r1arg2
178 role2 <- query.atoms filter (_.isRoleAssertion) 259 role2 <- queryBody filter (_.isRoleAssertion)
179 r2arg0 = role2.getArguments get 0 260 r2arg0 = role2.getArguments get 0
180 if query.bounded contains r2arg0 261 if query.bounded contains r2arg0
181 r2arg2 = role2.getArguments get 2 262 r2arg2 = role2.getArguments get 2
182 if query.bounded contains r2arg2 263 if query.bounded contains r2arg2
183 } yield Rule.create( 264 } yield Rule.create(
184 RSA.ID( 265 ID(
185 RSA(query.bounded indexOf r1arg0), 266 RSA(query.bounded indexOf r1arg0),
186 RSA(query.bounded indexOf r2arg0) 267 RSA(query.bounded indexOf r2arg0)
187 ), 268 ),
188 RSA.ID( 269 ID(
189 RSA(query.bounded indexOf r1arg2), 270 RSA(query.bounded indexOf r1arg2),
190 RSA(query.bounded indexOf r2arg2) 271 RSA(query.bounded indexOf r2arg2)
191 ), 272 ),
192 RSA.Congruent(r1arg0, r2arg0), 273 TupleTableAtom.create(tts, r1arg0, RSA.CONGRUENT, r2arg0),
193 role1 << Forward, 274 role1 :: Forward,
194 role2 << Forward, 275 role2 :: Forward,
195 not(RSA.NI(r1arg0)) 276 not(NI(r1arg0))
196 ) 277 )
197 val r5b = for { 278 val r5b = for {
198 role1 <- query.atoms filter (_.isRoleAssertion) 279 role1 <- queryBody filter (_.isRoleAssertion)
199 r1arg0 = role1.getArguments get 0 280 r1arg0 = role1.getArguments get 0
200 if query.bounded contains r1arg0 281 if query.bounded contains r1arg0
201 r1arg2 = role1.getArguments get 2 282 r1arg2 = role1.getArguments get 2
202 if query.bounded contains r1arg2 283 if query.bounded contains r1arg2
203 role2 <- query.atoms filter (_.isRoleAssertion) 284 role2 <- queryBody filter (_.isRoleAssertion)
204 r2arg0 = role2.getArguments get 0 285 r2arg0 = role2.getArguments get 0
205 if query.bounded contains r2arg0 286 if query.bounded contains r2arg0
206 r2arg2 = role2.getArguments get 2 287 r2arg2 = role2.getArguments get 2
207 if query.bounded contains r2arg2 288 if query.bounded contains r2arg2
208 } yield Rule.create( 289 } yield Rule.create(
209 RSA.ID( 290 ID(
210 RSA(query.bounded indexOf r1arg0), 291 RSA(query.bounded indexOf r1arg0),
211 RSA(query.bounded indexOf r2arg2) 292 RSA(query.bounded indexOf r2arg2)
212 ), 293 ),
213 RSA.ID( 294 ID(
214 RSA(query.bounded indexOf r1arg2), 295 RSA(query.bounded indexOf r1arg2),
215 RSA(query.bounded indexOf r2arg0) 296 RSA(query.bounded indexOf r2arg0)
216 ), 297 ),
217 RSA.Congruent(r1arg0, r2arg2), 298 TupleTableAtom.create(tts, r1arg0, RSA.CONGRUENT, r2arg2),
218 role1 << Forward, 299 role1 :: Forward,
219 role2 << Backward, 300 role2 :: Backward,
220 not(RSA.NI(r1arg0)) 301 not(NI(r1arg0))
221 ) 302 )
222 val r5c = for { 303 val r5c = for {
223 role1 <- query.atoms filter (_.isRoleAssertion) 304 role1 <- queryBody filter (_.isRoleAssertion)
224 r1arg0 = role1.getArguments get 0 305 r1arg0 = role1.getArguments get 0
225 if query.bounded contains r1arg0 306 if query.bounded contains r1arg0
226 r1arg2 = role1.getArguments get 2 307 r1arg2 = role1.getArguments get 2
227 if query.bounded contains r1arg2 308 if query.bounded contains r1arg2
228 role2 <- query.atoms filter (_.isRoleAssertion) 309 role2 <- queryBody filter (_.isRoleAssertion)
229 r2arg0 = role2.getArguments get 0 310 r2arg0 = role2.getArguments get 0
230 if query.bounded contains r2arg0 311 if query.bounded contains r2arg0
231 r2arg2 = role2.getArguments get 2 312 r2arg2 = role2.getArguments get 2
232 if query.bounded contains r2arg2 313 if query.bounded contains r2arg2
233 } yield Rule.create( 314 } yield Rule.create(
234 RSA.ID( 315 ID(
235 RSA(query.bounded indexOf r1arg2), 316 RSA(query.bounded indexOf r1arg2),
236 RSA(query.bounded indexOf r2arg2) 317 RSA(query.bounded indexOf r2arg2)
237 ), 318 ),
238 RSA.ID( 319 ID(
239 RSA(query.bounded indexOf r1arg0), 320 RSA(query.bounded indexOf r1arg0),
240 RSA(query.bounded indexOf r2arg0) 321 RSA(query.bounded indexOf r2arg0)
241 ), 322 ),
242 RSA.Congruent(r1arg2, r2arg2), 323 TupleTableAtom.create(tts, r1arg2, RSA.CONGRUENT, r2arg2),
243 role1 << Backward, 324 role1 :: Backward,
244 role2 << Backward, 325 role2 :: Backward,
245 not(RSA.NI(r1arg2)) 326 not(NI(r1arg2))
246 ) 327 )
247 328
248 /** Detect cycles in the canonical model. 329 /** Detect cycles in the canonical model.
@@ -254,30 +335,30 @@ class NaiveFilteringProgram(val query: ConjunctiveQuery)
254 * @note corresponds to rules 6,7x in Table 3. 335 * @note corresponds to rules 6,7x in Table 3.
255 */ 336 */
256 val r6 = for { 337 val r6 = for {
257 role <- query.atoms filter (_.isRoleAssertion) 338 role <- queryBody filter (_.isRoleAssertion)
258 index0 = query.bounded indexOf (role.getArguments get 0) 339 index0 = query.bounded indexOf (role.getArguments get 0)
259 if index0 >= 0 340 if index0 >= 0
260 index2 = query.bounded indexOf (role.getArguments get 2) 341 index2 = query.bounded indexOf (role.getArguments get 2)
261 if index2 >= 0 342 if index2 >= 0
262 suffix <- Seq(Forward, Backward) 343 suffix <- Seq(Forward, Backward)
263 } yield Rule.create( 344 } yield Rule.create(
264 RSA.AQ(varV, varW, suffix), 345 AQ(suffix, varV, varW),
265 role << suffix, 346 role :: suffix,
266 RSA.ID(RSA(index0), varV), 347 ID(RSA(index0), varV),
267 RSA.ID(RSA(index2), varW) 348 ID(RSA(index2), varW)
268 ) 349 )
269 val r7a = 350 val r7a =
270 for (suffix <- List(Forward, Backward)) 351 for (suffix <- List(Forward, Backward))
271 yield Rule.create( 352 yield Rule.create(
272 RSA.TQ(varU, varV, suffix), 353 TQ(suffix, varU, varV),
273 RSA.AQ(varU, varV, suffix) 354 AQ(suffix, varU, varV)
274 ) 355 )
275 val r7b = 356 val r7b =
276 for (suffix <- List(Forward, Backward)) 357 for (suffix <- List(Forward, Backward))
277 yield Rule.create( 358 yield Rule.create(
278 RSA.TQ(varU, varW, suffix), 359 TQ(suffix, varU, varW),
279 RSA.AQ(varU, varV, suffix), 360 AQ(suffix, varU, varV),
280 RSA.TQ(varV, varW, suffix) 361 TQ(suffix, varV, varW)
281 ) 362 )
282 363
283 /** Flag spurious answers. 364 /** Flag spurious answers.
@@ -286,14 +367,15 @@ class NaiveFilteringProgram(val query: ConjunctiveQuery)
286 */ 367 */
287 val r8a = 368 val r8a =
288 for (v <- query.answer) 369 for (v <- query.answer)
289 yield Rule.create(RSA.SP, RSA.QM, not(RSA.Named(v)))
290 val r8b = Rule.create(RSA.SP, RSA.FK)
291 val r8c =
292 for (suffix <- List(Forward, Backward))
293 yield Rule.create( 370 yield Rule.create(
294 RSA.SP, 371 SP,
295 RSA.TQ(varV, varV, suffix) 372 QM,
373 not(TupleTableAtom.create(tts, v, IRI.RDF_TYPE, RSA.NAMED))
296 ) 374 )
375 val r8b = Rule.create(SP, FK)
376 val r8c =
377 for (suffix <- List(Forward, Backward))
378 yield Rule.create(SP, TQ(suffix, varV, varV))
297 379
298 /** Determine answers to the query 380 /** Determine answers to the query
299 * 381 *
@@ -310,7 +392,7 @@ class NaiveFilteringProgram(val query: ConjunctiveQuery)
310 * 392 *
311 * @note corresponds to rule 9 in Table 3. 393 * @note corresponds to rule 9 in Table 3.
312 */ 394 */
313 val r9 = Rule.create(RSA.Ans, RSA.QM, not(RSA.SP)) 395 val r9 = Rule.create(Ans, QM, not(SP))
314 396
315 (r1 :: 397 (r1 ::
316 r3a ::: r3b :: r3c :: 398 r3a ::: r3b :: r3c ::
@@ -318,8 +400,70 @@ class NaiveFilteringProgram(val query: ConjunctiveQuery)
318 r5a ::: r5b ::: r5c ::: 400 r5a ::: r5b ::: r5c :::
319 r6 ::: r7b ::: r7a ::: 401 r6 ::: r7b ::: r7a :::
320 r8a ::: r8b :: r8c ::: 402 r8a ::: r8b :: r8c :::
321 r9 :: List()) map RDFoxUtil.reify 403 r9 :: List()) map reify
404 }
405
406 /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.Rule Rule]].
407 *
408 * This is needed because RDFox supports only predicates of arity 1
409 * or 2, but the filtering program uses predicates with higher arity.
410 *
411 * @note we can perform a reification of the atoms thanks to the
 412 * built-in `SKOLEM` function of RDFox.
413 */
414 def reify(rule: Rule): Rule = {
415 val (sk, as) = rule.getHead.map(reify).unzip
416 val head: List[TupleTableAtom] = as.flatten
417 val skolem: List[BodyFormula] = sk.flatten
418 val body: List[BodyFormula] = rule.getBody.map(reify).flatten
419 Rule.create(head, skolem ::: body)
420 }
421
422 /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.BodyFormula BodyFormula]]. */
423 private def reify(formula: BodyFormula): List[BodyFormula] = {
424 formula match {
425 case atom: TupleTableAtom => reify(atom)._2
426 case neg: Negation => {
427 val (sk, as) = neg.getNegatedAtoms
428 .map({
429 case a: TupleTableAtom => reify(a)
430 case a => (None, List(a))
431 })
432 .unzip
433 val skolem =
434 sk.flatten.map(_.getArguments.last).collect { case v: Variable => v }
435 val atoms = as.flatten
436 List(Negation.create(skolem, atoms))
437 }
438 case other => List(other)
439 }
440 }
441
442 /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableAtom TupleTableAtom]]. */
443 private def reify(atom: TupleTableAtom)(implicit
444 fresh: DataFactory
445 ): (Option[TupleTableAtom], List[TupleTableAtom]) = {
446 if (atom.getArguments.length == 3) {
447 (None, List(atom))
448 } else {
449 val varS: Variable = fresh.getVariable
450 val (pred :: args): List[Term] = atom.getArguments
451 val name = pred.asInstanceOf[IRI].getIRI
452 val skolem = TupleTableAtom.create(
453 TupleTableName.SKOLEM,
454 Literal.create(name, Datatype.XSD_STRING) +: args :+ varS
455 )
456 val triple =
457 TupleTableAtom.create(atom.getTupleTableName, varS, IRI.RDF_TYPE, pred)
458 val triples = args.zipWithIndex
459 .map { case (a, i) =>
460 TupleTableAtom.create(atom.getTupleTableName, varS, name :: Nth(i), a)
461 }
462 (Some(skolem), triple :: triples)
322 } 463 }
464 }
465
466 val answerQuery =
467 RDFoxUtil.buildDescriptionQuery(target, RSA.ANS, query.answer.size)
323 468
324 val answerQuery = RDFoxUtil.buildDescriptionQuery("Ans", query.answer.size)
325} 469}
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala
index 4a4e65c..94524be 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala
@@ -42,7 +42,7 @@ object RDFoxDSL {
42 import scala.collection.JavaConverters._ 42 import scala.collection.JavaConverters._
43 43
44 implicit class MyVariable(private val str: StringContext) extends AnyVal { 44 implicit class MyVariable(private val str: StringContext) extends AnyVal {
45 def v(args: Any*): Variable = Variable.create(s"${str.s(args: _*)}i") 45 def v(args: Any*): Variable = Variable.create(s"${str.s(args: _*)}")
46 } 46 }
47 47
48} 48}
@@ -54,8 +54,12 @@ object RevisedFilteringProgram {
54 * 54 *
55 * @param query CQ to be converted into logic rules. 55 * @param query CQ to be converted into logic rules.
56 */ 56 */
57 def apply(query: ConjunctiveQuery): RevisedFilteringProgram = 57 def apply(
58 new RevisedFilteringProgram(query) 58 source: IRI,
59 target: IRI,
60 query: ConjunctiveQuery
61 ): RevisedFilteringProgram =
62 new RevisedFilteringProgram(source, target, query)
59 63
60} 64}
61 65
@@ -66,8 +70,11 @@ object RevisedFilteringProgram {
66 * 70 *
67 * Instances can be created using the companion object. 71 * Instances can be created using the companion object.
68 */ 72 */
69class RevisedFilteringProgram(val query: ConjunctiveQuery) 73class RevisedFilteringProgram(
70 extends FilteringProgram { 74 val source: IRI,
75 val target: IRI,
76 val query: ConjunctiveQuery
77) extends FilteringProgram {
71 78
72 import RDFoxDSL._ 79 import RDFoxDSL._
73 80
@@ -76,45 +83,47 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
76 */ 83 */
77 import uk.ac.ox.cs.rsacomb.implicits.RSAAtom._ 84 import uk.ac.ox.cs.rsacomb.implicits.RSAAtom._
78 85
86 /** Simplify conversion between Java and Scala `List`s */
87 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._
88
79 /** Implicit parameter used in RSA internal predicates. 89 /** Implicit parameter used in RSA internal predicates.
80 * 90 *
81 * @see [[uk.ac.ox.cs.rsacomb.util.RSA]] for more information. 91 * @see [[uk.ac.ox.cs.rsacomb.util.RSA]] for more information.
82 */ 92 */
83 implicit private[this] val _query = query 93 implicit private[this] val _query = query
84 94
85 /** Helpers */ 95 /** `TupleTableName`s for the source/targer named graphs */
96 val tts: TupleTableName = TupleTableName.create(source.getIRI)
97 val ttt: TupleTableName = TupleTableName.create(target.getIRI)
98
99 /** Set of atoms in the body of the query */
100 private val queryBody: List[TupleTableAtom] = query.atoms(tts)
86 101
87 //private def v(name: String): Term = Variable.create(s"${name}i") 102 /** Helpers */
88 private def not(atom: TupleTableAtom): BodyFormula = Negation.create(atom) 103 private def not(atom: TupleTableAtom): BodyFormula = Negation.create(atom)
89 104
90 private def named(x: Term): TupleTableAtom =
91 TupleTableAtom.rdf(x, IRI.RDF_TYPE, RSA.NAMED)
92 private def congruent(x: Term, y: Term): TupleTableAtom =
93 TupleTableAtom.rdf(x, RSA.CONGRUENT, y)
94 private def skolem(skolem: Term, terms: List[Term]): TupleTableAtom =
95 TupleTableAtom.create(TupleTableName.SKOLEM, (terms :+ skolem): _*)
96 private def QM(x: Term): TupleTableAtom = 105 private def QM(x: Term): TupleTableAtom =
97 TupleTableAtom.rdf(x, IRI.RDF_TYPE, RSA("QM")) 106 TupleTableAtom.create(ttt, x, IRI.RDF_TYPE, RSA.QM)
98 private def FK(x: Term): TupleTableAtom = 107 private def FK(x: Term): TupleTableAtom =
99 TupleTableAtom.rdf(x, IRI.RDF_TYPE, RSA("FK")) 108 TupleTableAtom.create(ttt, x, IRI.RDF_TYPE, RSA.FK)
100 private def SP(x: Term): TupleTableAtom = 109 private def SP(x: Term): TupleTableAtom =
101 TupleTableAtom.rdf(x, IRI.RDF_TYPE, RSA("SP")) 110 TupleTableAtom.create(ttt, x, IRI.RDF_TYPE, RSA.SP)
102 private def NI(x: Term): TupleTableAtom = 111 private def NI(x: Term): TupleTableAtom =
103 TupleTableAtom.rdf(x, IRI.RDF_TYPE, RSA("NI")) 112 TupleTableAtom.create(ttt, x, IRI.RDF_TYPE, RSA.NI)
104 private def Ans(x: Term): TupleTableAtom = 113 private def Ans(x: Term): TupleTableAtom =
105 TupleTableAtom.rdf(x, IRI.RDF_TYPE, RSA("Ans")) 114 TupleTableAtom.create(ttt, x, IRI.RDF_TYPE, RSA.ANS)
106 private def ID(x: Term, y: Term): TupleTableAtom = 115 private def ID(x: Term, y: Term): TupleTableAtom =
107 TupleTableAtom.rdf(x, RSA("ID"), y) 116 TupleTableAtom.create(ttt, x, RSA.ID, y)
108 private def AQ(suffix: RSASuffix, x: Term, y: Term): TupleTableAtom = 117 private def AQ(suffix: RSASuffix)(x: Term, y: Term): TupleTableAtom =
109 TupleTableAtom.rdf(x, RSA("AQ"), y) << suffix 118 TupleTableAtom.create(ttt, x, RSA.AQ :: suffix, y)
110 private def TQ(suffix: RSASuffix, x: Term, y: Term): TupleTableAtom = 119 private def TQ(suffix: RSASuffix)(x: Term, y: Term): TupleTableAtom =
111 TupleTableAtom.rdf(x, RSA("TQ"), y) << suffix 120 TupleTableAtom.create(ttt, x, RSA.TQ :: suffix, y)
112 121
113 /** Rule generating the instances of the predicate `rsa:NI`. 122 /** Rule generating the instances of the predicate `rsa:NI`.
114 * 123 *
115 * According to the original paper, the set of `rsa:NI` is defined as 124 * According to the original paper, the set of `rsa:NI` is defined as
116 * the set of constants that are equal (w.r.t. the congruence 125 * the set of constants that are equal (w.r.t. the congruence
117 * relation represented by `rsa:Congruent`) to a constant in the 126 * relation represented by `rsacomb:Congruent`) to a constant in the
118 * original ontology. 127 * original ontology.
119 * 128 *
120 * @note that the set of `rsa:Named` constants is always a subset of 129 * @note that the set of `rsa:Named` constants is always a subset of
@@ -125,7 +134,8 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
125 * predicate, this is not feasible, and the instances are instead 134 * predicate, this is not feasible, and the instances are instead
126 * generate in the filtering program using a logic rule. 135 * generate in the filtering program using a logic rule.
127 */ 136 */
128 val nis: Rule = Rule.create(NI(v"X"), named(v"Y"), congruent(v"X", v"Y")) 137 val nis: Rule =
138 Rule.create(NI(v"X"), RSA.Named(tts)(v"Y"), RSA.Congruent(tts)(v"X", v"Y"))
129 139
130 /** Collection of filtering program rules. */ 140 /** Collection of filtering program rules. */
131 val rules: List[Rule] = 141 val rules: List[Rule] =
@@ -138,7 +148,7 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
138 * @note corresponds to rule 1 in Table 3 in the paper. 148 * @note corresponds to rule 1 in Table 3 in the paper.
139 */ 149 */
140 val r1 = 150 val r1 =
141 Rule.create(QM(v"K"), (query.atoms :+ skolem(v"K", variables)): _*) 151 Rule.create(QM(v"K"), queryBody :+ RSA.Skolem(v"K", variables))
142 152
143 /** Initializes instances of `rsa:ID`. 153 /** Initializes instances of `rsa:ID`.
144 * 154 *
@@ -153,26 +163,26 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
153 yield Rule.create( 163 yield Rule.create(
154 ID(v"K", v"S"), 164 ID(v"K", v"S"),
155 QM(v"K"), 165 QM(v"K"),
156 skolem(v"K", variables), 166 RSA.Skolem(v"K", variables),
157 not(NI(v)), 167 not(NI(v)),
158 skolem(v"S", variables :+ RSA(i) :+ RSA(i)) 168 RSA.Skolem(v"S", variables :+ RSA(i) :+ RSA(i))
159 ) 169 )
160 val r3b = Rule.create( 170 val r3b = Rule.create(
161 ID(v"K", v"T"), 171 ID(v"K", v"T"),
162 ID(v"K", v"S"), 172 ID(v"K", v"S"),
163 skolem(v"S", variables :+ v"U" :+ v"V"), 173 RSA.Skolem(v"S", variables :+ v"U" :+ v"V"),
164 skolem(v"T", variables :+ v"V" :+ v"U") 174 RSA.Skolem(v"T", variables :+ v"V" :+ v"U")
165 ) 175 )
166 val r3c = Rule.create( 176 val r3c = Rule.create(
167 ID(v"K1", v"Q"), 177 ID(v"K1", v"Q"),
168 QM(v"K1"), 178 QM(v"K1"),
169 ID(v"K2", v"S"), 179 ID(v"K2", v"S"),
170 FilterAtom.create(FunctionCall.equal(v"K1", v"K2")), 180 FilterAtom.create(FunctionCall.equal(v"K1", v"K2")),
171 skolem(v"S", variables :+ v"U" :+ v"V"), 181 RSA.Skolem(v"S", variables :+ v"U" :+ v"V"),
172 ID(v"K3", v"T"), 182 ID(v"K3", v"T"),
173 FilterAtom.create(FunctionCall.equal(v"K1", v"K3")), 183 FilterAtom.create(FunctionCall.equal(v"K1", v"K3")),
174 skolem(v"T", variables :+ v"V" :+ v"W"), 184 RSA.Skolem(v"T", variables :+ v"V" :+ v"W"),
175 skolem(v"Q", variables :+ v"U" :+ v"W") 185 RSA.Skolem(v"Q", variables :+ v"U" :+ v"W")
176 ) 186 )
177 187
178 /** Detects forks in the canonical model. 188 /** Detects forks in the canonical model.
@@ -180,49 +190,55 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
180 * @note corresponds to rules 4x in Table 3. 190 * @note corresponds to rules 4x in Table 3.
181 */ 191 */
182 val r4a = for { 192 val r4a = for {
183 role1 <- query.atoms filter (_.isRoleAssertion) 193 role1 <- queryBody filter (_.isRoleAssertion)
184 index1 = query.bounded indexOf (role1.getArguments get 2) 194 index1 = query.bounded indexOf (role1.getArguments get 2)
185 if index1 >= 0 195 if index1 >= 0
186 role2 <- query.atoms filter (_.isRoleAssertion) 196 role2 <- queryBody filter (_.isRoleAssertion)
187 index2 = query.bounded indexOf (role2.getArguments get 2) 197 index2 = query.bounded indexOf (role2.getArguments get 2)
188 if index2 >= 0 198 if index2 >= 0
189 } yield Rule.create( 199 } yield Rule.create(
190 FK(v"K"), 200 FK(v"K"),
191 ID(v"K", v"S"), 201 ID(v"K", v"S"),
192 skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)), 202 RSA.Skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)),
193 role1 << Forward, 203 role1 :: Forward,
194 role2 << Forward, 204 role2 :: Forward,
195 not(RSA.Congruent(role1.getArguments get 0, role2.getArguments get 0)) 205 not(
206 RSA.Congruent(tts)(role1.getArguments get 0, role2.getArguments get 0)
207 )
196 ) 208 )
197 val r4b = for { 209 val r4b = for {
198 role1 <- query.atoms filter (_.isRoleAssertion) 210 role1 <- queryBody filter (_.isRoleAssertion)
199 index1 = query.bounded indexOf (role1.getArguments get 2) 211 index1 = query.bounded indexOf (role1.getArguments get 2)
200 if index1 >= 0 212 if index1 >= 0
201 role2 <- query.atoms filter (_.isRoleAssertion) 213 role2 <- queryBody filter (_.isRoleAssertion)
202 index2 = query.bounded indexOf (role2.getArguments get 0) 214 index2 = query.bounded indexOf (role2.getArguments get 0)
203 if index2 >= 0 215 if index2 >= 0
204 } yield Rule.create( 216 } yield Rule.create(
205 FK(v"K"), 217 FK(v"K"),
206 ID(v"K", v"S"), 218 ID(v"K", v"S"),
207 skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)), 219 RSA.Skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)),
208 role1 << Forward, 220 role1 :: Forward,
209 role2 << Backward, 221 role2 :: Backward,
210 not(RSA.Congruent(role1.getArguments get 0, role2.getArguments get 2)) 222 not(
223 RSA.Congruent(tts)(role1.getArguments get 0, role2.getArguments get 2)
224 )
211 ) 225 )
212 val r4c = for { 226 val r4c = for {
213 role1 <- query.atoms filter (_.isRoleAssertion) 227 role1 <- queryBody filter (_.isRoleAssertion)
214 index1 = query.bounded indexOf (role1.getArguments get 0) 228 index1 = query.bounded indexOf (role1.getArguments get 0)
215 if index1 >= 0 229 if index1 >= 0
216 role2 <- query.atoms filter (_.isRoleAssertion) 230 role2 <- queryBody filter (_.isRoleAssertion)
217 index2 = query.bounded indexOf (role2.getArguments get 0) 231 index2 = query.bounded indexOf (role2.getArguments get 0)
218 if index2 >= 0 232 if index2 >= 0
219 } yield Rule.create( 233 } yield Rule.create(
220 FK(v"K"), 234 FK(v"K"),
221 ID(v"K", v"S"), 235 ID(v"K", v"S"),
222 skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)), 236 RSA.Skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)),
223 role1 << Backward, 237 role1 :: Backward,
224 role2 << Backward, 238 role2 :: Backward,
225 not(RSA.Congruent(role1.getArguments get 2, role2.getArguments get 2)) 239 not(
240 RSA.Congruent(tts)(role1.getArguments get 2, role2.getArguments get 2)
241 )
226 ) 242 )
227 243
228 /** Recursively propagates `rsa:ID` predicate. 244 /** Recursively propagates `rsa:ID` predicate.
@@ -230,12 +246,12 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
230 * @note corresponds to rules 5x in Table 3. 246 * @note corresponds to rules 5x in Table 3.
231 */ 247 */
232 val r5a = for { 248 val r5a = for {
233 role1 <- query.atoms filter (_.isRoleAssertion) 249 role1 <- queryBody filter (_.isRoleAssertion)
234 r1arg0 = role1.getArguments get 0 250 r1arg0 = role1.getArguments get 0
235 if query.bounded contains r1arg0 251 if query.bounded contains r1arg0
236 r1arg2 = role1.getArguments get 2 252 r1arg2 = role1.getArguments get 2
237 if query.bounded contains r1arg2 253 if query.bounded contains r1arg2
238 role2 <- query.atoms filter (_.isRoleAssertion) 254 role2 <- queryBody filter (_.isRoleAssertion)
239 r2arg0 = role2.getArguments get 0 255 r2arg0 = role2.getArguments get 0
240 if query.bounded contains r2arg0 256 if query.bounded contains r2arg0
241 r2arg2 = role2.getArguments get 2 257 r2arg2 = role2.getArguments get 2
@@ -243,17 +259,17 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
243 } yield Rule.create( 259 } yield Rule.create(
244 ID(v"K", v"T"), 260 ID(v"K", v"T"),
245 ID(v"K", v"S"), 261 ID(v"K", v"S"),
246 skolem( 262 RSA.Skolem(
247 v"S", 263 v"S",
248 variables :+ 264 variables :+
249 RSA(query.bounded indexOf r1arg2) :+ 265 RSA(query.bounded indexOf r1arg2) :+
250 RSA(query.bounded indexOf r2arg2) 266 RSA(query.bounded indexOf r2arg2)
251 ), 267 ),
252 RSA.Congruent(r1arg0, r2arg0), 268 RSA.Congruent(tts)(r1arg0, r2arg0),
253 role1 << Forward, 269 role1 :: Forward,
254 role2 << Forward, 270 role2 :: Forward,
255 not(NI(r1arg0)), 271 not(NI(r1arg0)),
256 skolem( 272 RSA.Skolem(
257 v"T", 273 v"T",
258 variables :+ 274 variables :+
259 RSA(query.bounded indexOf r1arg0) :+ 275 RSA(query.bounded indexOf r1arg0) :+
@@ -261,12 +277,12 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
261 ) 277 )
262 ) 278 )
263 val r5b = for { 279 val r5b = for {
264 role1 <- query.atoms filter (_.isRoleAssertion) 280 role1 <- queryBody filter (_.isRoleAssertion)
265 r1arg0 = role1.getArguments get 0 281 r1arg0 = role1.getArguments get 0
266 if query.bounded contains r1arg0 282 if query.bounded contains r1arg0
267 r1arg2 = role1.getArguments get 2 283 r1arg2 = role1.getArguments get 2
268 if query.bounded contains r1arg2 284 if query.bounded contains r1arg2
269 role2 <- query.atoms filter (_.isRoleAssertion) 285 role2 <- queryBody filter (_.isRoleAssertion)
270 r2arg0 = role2.getArguments get 0 286 r2arg0 = role2.getArguments get 0
271 if query.bounded contains r2arg0 287 if query.bounded contains r2arg0
272 r2arg2 = role2.getArguments get 2 288 r2arg2 = role2.getArguments get 2
@@ -274,17 +290,17 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
274 } yield Rule.create( 290 } yield Rule.create(
275 ID(v"K", v"T"), 291 ID(v"K", v"T"),
276 ID(v"K", v"S"), 292 ID(v"K", v"S"),
277 skolem( 293 RSA.Skolem(
278 v"S", 294 v"S",
279 variables :+ 295 variables :+
280 RSA(query.bounded indexOf r1arg2) :+ 296 RSA(query.bounded indexOf r1arg2) :+
281 RSA(query.bounded indexOf r2arg0) 297 RSA(query.bounded indexOf r2arg0)
282 ), 298 ),
283 RSA.Congruent(r1arg0, r2arg2), 299 RSA.Congruent(tts)(r1arg0, r2arg2),
284 role1 << Forward, 300 role1 :: Forward,
285 role2 << Backward, 301 role2 :: Backward,
286 not(RSA.NI(r1arg0)), 302 not(NI(r1arg0)),
287 skolem( 303 RSA.Skolem(
288 v"T", 304 v"T",
289 variables :+ 305 variables :+
290 RSA(query.bounded indexOf r1arg0) :+ 306 RSA(query.bounded indexOf r1arg0) :+
@@ -292,12 +308,12 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
292 ) 308 )
293 ) 309 )
294 val r5c = for { 310 val r5c = for {
295 role1 <- query.atoms filter (_.isRoleAssertion) 311 role1 <- queryBody filter (_.isRoleAssertion)
296 r1arg0 = role1.getArguments get 0 312 r1arg0 = role1.getArguments get 0
297 if query.bounded contains r1arg0 313 if query.bounded contains r1arg0
298 r1arg2 = role1.getArguments get 2 314 r1arg2 = role1.getArguments get 2
299 if query.bounded contains r1arg2 315 if query.bounded contains r1arg2
300 role2 <- query.atoms filter (_.isRoleAssertion) 316 role2 <- queryBody filter (_.isRoleAssertion)
301 r2arg0 = role2.getArguments get 0 317 r2arg0 = role2.getArguments get 0
302 if query.bounded contains r2arg0 318 if query.bounded contains r2arg0
303 r2arg2 = role2.getArguments get 2 319 r2arg2 = role2.getArguments get 2
@@ -305,17 +321,17 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
305 } yield Rule.create( 321 } yield Rule.create(
306 ID(v"K", v"T"), 322 ID(v"K", v"T"),
307 ID(v"K", v"S"), 323 ID(v"K", v"S"),
308 skolem( 324 RSA.Skolem(
309 v"S", 325 v"S",
310 variables :+ 326 variables :+
311 RSA(query.bounded indexOf r1arg0) :+ 327 RSA(query.bounded indexOf r1arg0) :+
312 RSA(query.bounded indexOf r2arg0) 328 RSA(query.bounded indexOf r2arg0)
313 ), 329 ),
314 RSA.Congruent(r1arg2, r2arg2), 330 RSA.Congruent(tts)(r1arg2, r2arg2),
315 role1 << Backward, 331 role1 :: Backward,
316 role2 << Backward, 332 role2 :: Backward,
317 not(RSA.NI(r1arg2)), 333 not(NI(r1arg2)),
318 skolem( 334 RSA.Skolem(
319 v"T", 335 v"T",
320 variables :+ 336 variables :+
321 RSA(query.bounded indexOf r1arg2) :+ 337 RSA(query.bounded indexOf r1arg2) :+
@@ -332,38 +348,38 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
332 * @note corresponds to rules 6,7x in Table 3. 348 * @note corresponds to rules 6,7x in Table 3.
333 */ 349 */
334 val r6 = for { 350 val r6 = for {
335 role <- query.atoms filter (_.isRoleAssertion) 351 role <- queryBody filter (_.isRoleAssertion)
336 index0 = query.bounded indexOf (role.getArguments get 0) 352 index0 = query.bounded indexOf (role.getArguments get 0)
337 if index0 >= 0 353 if index0 >= 0
338 index2 = query.bounded indexOf (role.getArguments get 2) 354 index2 = query.bounded indexOf (role.getArguments get 2)
339 if index2 >= 0 355 if index2 >= 0
340 suffix <- Seq(Forward, Backward) 356 suffix <- Seq(Forward, Backward)
341 } yield Rule.create( 357 } yield Rule.create(
342 AQ(suffix, v"K1", v"Q"), 358 AQ(suffix)(v"K1", v"Q"),
343 ID(v"K1", v"S"), 359 ID(v"K1", v"S"),
344 skolem(v"S", variables :+ RSA(index0) :+ v"V"), 360 RSA.Skolem(v"S", variables :+ RSA(index0) :+ v"V"),
345 ID(v"K2", v"T"), 361 ID(v"K2", v"T"),
346 FilterAtom.create(FunctionCall.equal(v"K1", v"K2")), 362 FilterAtom.create(FunctionCall.equal(v"K1", v"K2")),
347 skolem(v"T", variables :+ RSA(index2) :+ v"W"), 363 RSA.Skolem(v"T", variables :+ RSA(index2) :+ v"W"),
348 role << suffix, 364 role :: suffix,
349 skolem(v"Q", variables :+ v"V" :+ v"W") 365 RSA.Skolem(v"Q", variables :+ v"V" :+ v"W")
350 ) 366 )
351 val r7a = 367 val r7a =
352 for (suffix <- List(Forward, Backward)) 368 for (suffix <- List(Forward, Backward))
353 yield Rule.create( 369 yield Rule.create(
354 TQ(suffix, v"K", v"S"), 370 TQ(suffix)(v"K", v"S"),
355 AQ(suffix, v"K", v"S") 371 AQ(suffix)(v"K", v"S")
356 ) 372 )
357 val r7b = 373 val r7b =
358 for (suffix <- List(Forward, Backward)) 374 for (suffix <- List(Forward, Backward))
359 yield Rule.create( 375 yield Rule.create(
360 TQ(suffix, v"K1", v"Q"), 376 TQ(suffix)(v"K1", v"Q"),
361 AQ(suffix, v"K1", v"S"), 377 AQ(suffix)(v"K1", v"S"),
362 skolem(v"S", variables :+ v"U" :+ v"V"), 378 RSA.Skolem(v"S", variables :+ v"U" :+ v"V"),
363 TQ(suffix, v"K2", v"T"), 379 TQ(suffix)(v"K2", v"T"),
364 FilterAtom.create(FunctionCall.equal(v"K1", v"K2")), 380 FilterAtom.create(FunctionCall.equal(v"K1", v"K2")),
365 skolem(v"T", variables :+ v"V" :+ v"W"), 381 RSA.Skolem(v"T", variables :+ v"V" :+ v"W"),
366 skolem(v"Q", variables :+ v"U" :+ v"W") 382 RSA.Skolem(v"Q", variables :+ v"U" :+ v"W")
367 ) 383 )
368 384
369 /** Flag spurious answers. 385 /** Flag spurious answers.
@@ -375,19 +391,16 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
375 yield Rule.create( 391 yield Rule.create(
376 SP(v"K"), 392 SP(v"K"),
377 QM(v"K"), 393 QM(v"K"),
378 skolem(v"K", variables), 394 RSA.Skolem(v"K", variables),
379 not(RSA.Named(v)) 395 not(RSA.Named(tts)(v))
380 ) 396 )
381 val r8b = Rule.create( 397 val r8b = Rule.create(SP(v"K"), FK(v"K"))
382 SP(v"K"),
383 FK(v"K")
384 )
385 val r8c = 398 val r8c =
386 for (suffix <- List(Forward, Backward)) 399 for (suffix <- List(Forward, Backward))
387 yield Rule.create( 400 yield Rule.create(
388 SP(v"K"), 401 SP(v"K"),
389 TQ(suffix, v"K", v"S"), 402 TQ(suffix)(v"K", v"S"),
390 skolem(v"S", variables :+ v"V" :+ v"V") 403 RSA.Skolem(v"S", variables :+ v"V" :+ v"V")
391 ) 404 )
392 405
393 /** Determine answers to the query 406 /** Determine answers to the query
@@ -405,11 +418,7 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
405 * 418 *
406 * @note corresponds to rule 9 in Table 3. 419 * @note corresponds to rule 9 in Table 3.
407 */ 420 */
408 val r9 = Rule.create( 421 val r9 = Rule.create(Ans(v"K"), QM(v"K"), not(SP(v"K")))
409 Ans(v"K"),
410 QM(v"K"),
411 not(SP(v"K"))
412 )
413 422
414 (r1 :: r3a ::: r3b :: r3c :: r4a ::: r4b ::: r4c ::: r5a ::: r5b ::: r5c ::: r6 ::: r7b ::: r7a ::: r8a ::: r8b :: r8c ::: r9 :: List()) 423 (r1 :: r3a ::: r3b :: r3c :: r4a ::: r4b ::: r4c ::: r5a ::: r5b ::: r5c ::: r6 ::: r7b ::: r7a ::: r8a ::: r8b :: r8c ::: r9 :: List())
415 } 424 }
@@ -422,12 +431,12 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery)
422 s""" 431 s"""
423 SELECT $answer 432 SELECT $answer
424 WHERE { 433 WHERE {
425 ?K a rsa:Ans . 434 GRAPH $target { ?K a ${RSA.ANS} } .
426 TT <http://oxfordsemantic.tech/RDFox#SKOLEM> { $answer $bounded ?K } . 435 TT ${TupleTableName.SKOLEM} { $answer $bounded ?K } .
427 } 436 }
428 """ 437 """
429 } else { 438 } else {
430 "ASK { ?X a rsa:Ans }" 439 s"ASK { GRAPH $target { ?X a ${RSA.ANS} } }"
431 } 440 }
432 } 441 }
433 442
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RDFox.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RDFox.scala
index d4b7876..ca77409 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RDFox.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RDFox.scala
@@ -17,6 +17,7 @@
17package uk.ac.ox.cs.rsacomb.implicits 17package uk.ac.ox.cs.rsacomb.implicits
18 18
19import tech.oxfordsemantic.jrdfox.logic.Datatype 19import tech.oxfordsemantic.jrdfox.logic.Datatype
20import tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableName
20import tech.oxfordsemantic.jrdfox.logic.expression.{ 21import tech.oxfordsemantic.jrdfox.logic.expression.{
21 BlankNode, 22 BlankNode,
22 IRI => RDFoxIRI, 23 IRI => RDFoxIRI,
@@ -47,6 +48,9 @@ object RDFox {
47 implicit def stringToRdfoxIri(iri: String): RDFoxIRI = 48 implicit def stringToRdfoxIri(iri: String): RDFoxIRI =
48 RDFoxIRI.create(iri) 49 RDFoxIRI.create(iri)
49 50
51 implicit def iriToTupleTableName(iri: RDFoxIRI): TupleTableName =
52 TupleTableName.create(iri.getIRI)
53
50 /** Converst an OWLAPI datatype into an RDFox datatype. 54 /** Converst an OWLAPI datatype into an RDFox datatype.
51 * 55 *
52 * The builtin datatypes defined by the two systems do not match 56 * The builtin datatypes defined by the two systems do not match
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala
index 09bfa1e..89777c4 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala
@@ -25,25 +25,9 @@ import tech.oxfordsemantic.jrdfox.logic.datalog.{
25} 25}
26import tech.oxfordsemantic.jrdfox.logic.expression.{IRI} 26import tech.oxfordsemantic.jrdfox.logic.expression.{IRI}
27 27
28import uk.ac.ox.cs.rsacomb.RSAUtil
29import uk.ac.ox.cs.rsacomb.RSAOntology 28import uk.ac.ox.cs.rsacomb.RSAOntology
30import uk.ac.ox.cs.rsacomb.suffix.{RSASuffix, Nth} 29import uk.ac.ox.cs.rsacomb.suffix.{RSASuffix, Nth}
31import uk.ac.ox.cs.rsacomb.util.RDFoxUtil 30import uk.ac.ox.cs.rsacomb.util.{DataFactory, RDFoxUtil}
32
33/* Is this the best way to determine if an atom is an RDF triple?
34 * Note that we can't use `getNumberOfArguments()` because is not
35 * "consistent":
36 * - for an atom created with `rdf(<term1>, <term2>, <term3>)`,
37 * `getNumberOfArguments` returns 3
38 * - for an atom created with `Atom.create(<tupletablename>, <term1>,
39 * <term2>, <term3>)`, `getNumberOfArguments()` returns 3
40 *
41 * This is probably because `Atom.rdf(...) is implemented as:
42 * ```scala
43 * def rdf(term1: Term, term2: Term, term3: Term): Atom =
44 * Atom.create(TupleTableName.create("rdfox:DefaultTriples"), term1, term2, term3)
45 * ```
46 */
47 31
48object RSAAtom { 32object RSAAtom {
49 33
@@ -52,55 +36,49 @@ object RSAAtom {
52 import RDFox._ 36 import RDFox._
53 import JavaCollections._ 37 import JavaCollections._
54 38
55 val name: String = atom.getTupleTableName.getName 39 val tt: TupleTableName = atom.getTupleTableName
56 40
57 val args: List[Term] = atom.getArguments 41 val args: List[Term] = atom.getArguments
58 42
59 val isRDF: Boolean = 43 val isRDF: Boolean = atom.getArguments.length == 3
60 name == "http://oxfordsemantic.tech/RDFox#DefaultTriples"
61 44
62 val isClassAssertion: Boolean = { 45 val isClassAssertion: Boolean =
63 isRDF && { 46 isRDF && atom.getArguments.get(1) == IRI.RDF_TYPE
64 val pred = atom.getArguments.get(1)
65 pred == IRI.RDF_TYPE
66 }
67 }
68 47
69 val isRoleAssertion: Boolean = isRDF && !isClassAssertion 48 val isRoleAssertion: Boolean = isRDF && !isClassAssertion
70 49
71 def <<(suffix: RSASuffix): TupleTableAtom = 50 // def <<(suffix: RSASuffix): TupleTableAtom =
72 if (isRDF) { 51 // if (isRDF) {
73 val subj = atom.getArguments.get(0) 52 // val subj = atom.getArguments.get(0)
74 val pred = atom.getArguments.get(1) 53 // val pred = atom.getArguments.get(1)
75 val obj = atom.getArguments.get(2) 54 // val obj = atom.getArguments.get(2)
76 if (isClassAssertion) { 55 // if (isClassAssertion) {
77 val obj1 = obj match { 56 // val obj1 = obj match {
78 case iri: IRI => IRI.create(iri.getIRI :: suffix) 57 // case iri: IRI => IRI.create(iri.getIRI :: suffix)
79 case other => other 58 // case other => other
80 } 59 // }
81 TupleTableAtom.rdf(subj, pred, obj1) 60 // TupleTableAtom.create(tt, subj, pred, obj1)
82 } else { 61 // } else {
83 val pred1 = pred match { 62 // val pred1 = pred match {
84 case iri: IRI => IRI.create(iri.getIRI :: suffix) 63 // case iri: IRI => IRI.create(iri.getIRI :: suffix)
85 case other => other 64 // case other => other
86 } 65 // }
87 TupleTableAtom.rdf(subj, pred1, obj) 66 // TupleTableAtom.create(tt, subj, pred1, obj)
88 } 67 // }
89 } else { 68 // } else atom
90 val ttname = TupleTableName.create(name :: suffix)
91 TupleTableAtom.create(ttname, atom.getArguments())
92 }
93 69
94 lazy val reified: (Option[TupleTableAtom], List[TupleTableAtom]) = 70 // def reified(implicit
95 if (isRDF) { 71 // fresh: DataFactory
96 (None, List(atom)) 72 // ): (Option[TupleTableAtom], List[TupleTableAtom]) =
97 } else { 73 // if (isRDF) {
98 val varS = RSAUtil.genFreshVariable() 74 // (None, List(atom))
99 val skolem = RDFoxUtil.skolem(name, (args :+ varS): _*) 75 // } else {
100 val atom = TupleTableAtom.rdf(varS, IRI.RDF_TYPE, name) 76 // val varS = fresh.getVariable
101 val atoms = args.zipWithIndex 77 // val skolem = RDFoxUtil.skolem(name, (args :+ varS): _*)
102 .map { case (a, i) => TupleTableAtom.rdf(varS, name :: Nth(i), a) } 78 // val atom = TupleTableAtom.rdf(varS, IRI.RDF_TYPE, name)
103 (Some(skolem), atom :: atoms) 79 // val atoms = args.zipWithIndex
104 } 80 // .map { case (a, i) => TupleTableAtom.rdf(varS, name :: Nth(i), a) }
81 // (Some(skolem), atom :: atoms)
82 // }
105 } 83 }
106} 84}
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala
index ba44605..0aceb01 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala
@@ -29,15 +29,18 @@ import org.semanticweb.owlapi.apibinding.OWLManager
29import org.semanticweb.owlapi.model.{OWLOntology, OWLAxiom, OWLLogicalAxiom} 29import org.semanticweb.owlapi.model.{OWLOntology, OWLAxiom, OWLLogicalAxiom}
30import org.semanticweb.owlapi.model.{OWLObjectPropertyExpression} 30import org.semanticweb.owlapi.model.{OWLObjectPropertyExpression}
31import org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory 31import org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory
32import tech.oxfordsemantic.jrdfox.logic.datalog.Rule 32import tech.oxfordsemantic.jrdfox.logic.datalog.{Rule, TupleTableName}
33import tech.oxfordsemantic.jrdfox.logic.expression.{Resource, Term, Variable} 33import tech.oxfordsemantic.jrdfox.logic.expression.{
34 IRI,
35 Resource,
36 Term,
37 Variable
38}
34 39
35import uk.ac.ox.cs.rsacomb.approximation.Approximation 40import uk.ac.ox.cs.rsacomb.approximation.Approximation
36import uk.ac.ox.cs.rsacomb.converter._ 41import uk.ac.ox.cs.rsacomb.converter._
37import uk.ac.ox.cs.rsacomb.suffix._ 42import uk.ac.ox.cs.rsacomb.suffix._
38import uk.ac.ox.cs.rsacomb.util.{RDFoxUtil, RSA} 43import uk.ac.ox.cs.rsacomb.util.{DataFactory, RDFoxUtil, RSA}
39
40import uk.ac.ox.cs.rsacomb.RSAUtil
41 44
42object Ontology { 45object Ontology {
43 46
@@ -74,7 +77,7 @@ object Ontology {
74 */ 77 */
75 def dependencyGraph( 78 def dependencyGraph(
76 axioms: List[OWLLogicalAxiom], 79 axioms: List[OWLLogicalAxiom],
77 datafiles: List[File], 80 datafiles: List[os.Path],
78 unsafe: List[OWLObjectPropertyExpression] 81 unsafe: List[OWLObjectPropertyExpression]
79 ): DependencyGraph = { 82 ): DependencyGraph = {
80 83
@@ -95,12 +98,13 @@ object Ontology {
95 unsafe: List[OWLObjectPropertyExpression], 98 unsafe: List[OWLObjectPropertyExpression],
96 skolem: SkolemStrategy, 99 skolem: SkolemStrategy,
97 suffix: RSASuffix 100 suffix: RSASuffix
98 ): Shards = 101 )(implicit fresh: DataFactory): Shards =
99 (expr, skolem) match { 102 (expr, skolem) match {
100 103
101 case (e: OWLObjectSomeValuesFrom, c: Constant) => { 104 case (e: OWLObjectSomeValuesFrom, c: Constant) => {
102 nodemap.update(c.iri.getIRI, c.axiom) 105 nodemap.update(c.iri.getIRI, c.axiom)
103 val (res, ext) = super.convert(e, term, unsafe, skolem, suffix) 106 val (res, ext) =
107 super.convert(e, term, unsafe, skolem, suffix)(fresh)
104 if (unsafe contains e.getProperty) 108 if (unsafe contains e.getProperty)
105 (RSA.PE(term, c.iri) :: RSA.U(c.iri) :: res, ext) 109 (RSA.PE(term, c.iri) :: RSA.U(c.iri) :: res, ext)
106 else 110 else
@@ -109,19 +113,20 @@ object Ontology {
109 113
110 case (e: OWLDataSomeValuesFrom, c: Constant) => { 114 case (e: OWLDataSomeValuesFrom, c: Constant) => {
111 nodemap.update(c.iri.getIRI, c.axiom) 115 nodemap.update(c.iri.getIRI, c.axiom)
112 val (res, ext) = super.convert(e, term, unsafe, skolem, suffix) 116 val (res, ext) =
117 super.convert(e, term, unsafe, skolem, suffix)(fresh)
113 if (unsafe contains e.getProperty) 118 if (unsafe contains e.getProperty)
114 (RSA.PE(term, c.iri) :: RSA.U(c.iri) :: res, ext) 119 (RSA.PE(term, c.iri) :: RSA.U(c.iri) :: res, ext)
115 else 120 else
116 (RSA.PE(term, c.iri) :: res, ext) 121 (RSA.PE(term, c.iri) :: res, ext)
117 } 122 }
118 123
119 case _ => super.convert(expr, term, unsafe, skolem, suffix) 124 case _ => super.convert(expr, term, unsafe, skolem, suffix)(fresh)
120 } 125 }
121 } 126 }
122 127
123 /* Ontology convertion into LP rules */ 128 /* Ontology convertion into LP rules */
124 val term = RSAUtil.genFreshVariable() 129 val term = Variable.create("X")
125 val result = axioms.map(a => 130 val result = axioms.map(a =>
126 RSAConverter.convert(a, term, unsafe, new Constant(a), Empty) 131 RSAConverter.convert(a, term, unsafe, new Constant(a), Empty)
127 ) 132 )
@@ -143,13 +148,14 @@ object Ontology {
143 RSA.U(varY) 148 RSA.U(varY)
144 ) :: rules 149 ) :: rules
145 /* Load facts and rules from ontology */ 150 /* Load facts and rules from ontology */
146 RDFoxUtil.addFacts(data, facts) 151 val ttn = IRI.create(TupleTableName.DEFAULT_TRIPLES.getName)
152 RDFoxUtil.addFacts(data, ttn, facts)
147 RDFoxUtil.addRules(data, rules) 153 RDFoxUtil.addRules(data, rules)
148 /* Load data files */ 154 /* Load data files */
149 RDFoxUtil.addData(data, datafiles: _*) 155 RDFoxUtil.addData(data, ttn, datafiles: _*)
150 156
151 /* Build the graph */ 157 /* Build the graph */
152 val query = "SELECT ?X ?Y WHERE { ?X rsa:E ?Y }" 158 val query = "SELECT ?X ?Y WHERE { ?X rsacomb:E ?Y }"
153 val answers = RDFoxUtil.submitQuery(data, query, RSA.Prefixes).get 159 val answers = RDFoxUtil.submitQuery(data, query, RSA.Prefixes).get
154 var edges: Seq[DiEdge[Resource]] = 160 var edges: Seq[DiEdge[Resource]] =
155 answers.collect { case (_, Seq(n1, n2)) => n1 ~> n2 } 161 answers.collect { case (_, Seq(n1, n2)) => n1 ~> n2 }
@@ -161,10 +167,10 @@ object Ontology {
161 (graph, nodemap) 167 (graph, nodemap)
162 } 168 }
163 169
164 def apply(axioms: List[OWLLogicalAxiom], datafiles: List[File]): Ontology = 170 // def apply(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]): Ontology =
165 new Ontology(axioms, datafiles) 171 // new Ontology(axioms, datafiles)
166 172
167 def apply(ontology: OWLOntology, datafiles: List[File]): Ontology = { 173 def apply(ontology: OWLOntology, datafiles: List[os.Path]): Ontology = {
168 174
169 /** TBox axioms */ 175 /** TBox axioms */
170 var tbox: List[OWLLogicalAxiom] = 176 var tbox: List[OWLLogicalAxiom] =
@@ -193,11 +199,11 @@ object Ontology {
193 .collect(Collectors.toList()) 199 .collect(Collectors.toList())
194 .collect { case a: OWLLogicalAxiom => a } 200 .collect { case a: OWLLogicalAxiom => a }
195 201
196 Ontology(abox ::: tbox ::: rbox, datafiles) 202 new Ontology(ontology, abox ::: tbox ::: rbox, datafiles)
197 } 203 }
198 204
199 def apply(ontofile: File, datafiles: List[File]): Ontology = { 205 def apply(ontofile: os.Path, datafiles: List[os.Path]): Ontology = {
200 val ontology = manager.loadOntologyFromOntologyDocument(ontofile) 206 val ontology = manager.loadOntologyFromOntologyDocument(ontofile.toIO)
201 Ontology(ontology, datafiles) 207 Ontology(ontology, datafiles)
202 } 208 }
203 209
@@ -208,7 +214,11 @@ object Ontology {
208 * @param axioms list of axioms (roughly) corresponding to the TBox. 214 * @param axioms list of axioms (roughly) corresponding to the TBox.
209 * @param datafiles files containing ABox data. 215 * @param datafiles files containing ABox data.
210 */ 216 */
211class Ontology(val axioms: List[OWLLogicalAxiom], val datafiles: List[File]) { 217class Ontology(
218 val origin: OWLOntology,
219 val axioms: List[OWLLogicalAxiom],
220 val datafiles: List[os.Path]
221) {
212 222
213 /** Extend OWLAxiom functionalities */ 223 /** Extend OWLAxiom functionalities */
214 import uk.ac.ox.cs.rsacomb.implicits.RSAAxiom._ 224 import uk.ac.ox.cs.rsacomb.implicits.RSAAxiom._
@@ -216,8 +226,6 @@ class Ontology(val axioms: List[OWLLogicalAxiom], val datafiles: List[File]) {
216 /** Simplify conversion between Java and Scala collections */ 226 /** Simplify conversion between Java and Scala collections */
217 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ 227 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._
218 228
219 println(s"Axioms: ${axioms.length}")
220
221 /** OWLOntology based on input axioms 229 /** OWLOntology based on input axioms
222 * 230 *
223 * This is mainly used to instantiate a new reasoner to be used in 231 * This is mainly used to instantiate a new reasoner to be used in
@@ -286,6 +294,7 @@ class Ontology(val axioms: List[OWLLogicalAxiom], val datafiles: List[File]) {
286 */ 294 */
287 def normalize(normalizer: Normalizer): Ontology = 295 def normalize(normalizer: Normalizer): Ontology =
288 new Ontology( 296 new Ontology(
297 origin,
289 axioms flatMap normalizer.normalize, 298 axioms flatMap normalizer.normalize,
290 datafiles 299 datafiles
291 ) 300 )
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/package.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/package.scala
new file mode 100644
index 0000000..53fa095
--- /dev/null
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/package.scala
@@ -0,0 +1,21 @@
1/*
2 * Copyright 2020, 2021 KRR Oxford
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package uk.ac.ox.cs
18package object rsacomb {
19
20 implicit val seed: util.DataFactory = util.DataFactory(0)
21}
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala
index 37a21e7..693a9af 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala
@@ -19,7 +19,7 @@ package uk.ac.ox.cs.rsacomb.sparql
19import java.util.{Map => JMap, HashMap => JHashMap} 19import java.util.{Map => JMap, HashMap => JHashMap}
20import tech.oxfordsemantic.jrdfox.Prefixes 20import tech.oxfordsemantic.jrdfox.Prefixes
21import tech.oxfordsemantic.jrdfox.client.DataStoreConnection 21import tech.oxfordsemantic.jrdfox.client.DataStoreConnection
22import tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableAtom 22import tech.oxfordsemantic.jrdfox.logic.datalog.{TupleTableAtom, TupleTableName}
23import tech.oxfordsemantic.jrdfox.logic.expression.Variable 23import tech.oxfordsemantic.jrdfox.logic.expression.Variable
24import tech.oxfordsemantic.jrdfox.logic.sparql.pattern.{ 24import tech.oxfordsemantic.jrdfox.logic.sparql.pattern.{
25 ConjunctionPattern, 25 ConjunctionPattern,
@@ -36,8 +36,8 @@ object ConjunctiveQuery {
36 * 36 *
37 * @param query `SelectQuery` instance representing the actual query 37 * @param query `SelectQuery` instance representing the actual query
38 */ 38 */
39 def apply(query: SelectQuery): ConjunctiveQuery = 39 def apply(id: Int, query: SelectQuery): ConjunctiveQuery =
40 new ConjunctiveQuery(query) 40 new ConjunctiveQuery(id, query)
41 41
42 /** Creates a new ConjunctiveQuery from a query string 42 /** Creates a new ConjunctiveQuery from a query string
43 * 43 *
@@ -48,10 +48,11 @@ object ConjunctiveQuery {
48 * input query represents one, None is returned otherwise. 48 * input query represents one, None is returned otherwise.
49 */ 49 */
50 def parse( 50 def parse(
51 id: Int,
51 query: String, 52 query: String,
52 prefixes: Prefixes = new Prefixes() 53 prefixes: Prefixes = new Prefixes()
53 ): Option[ConjunctiveQuery] = 54 ): Option[ConjunctiveQuery] =
54 RDFoxUtil.parseSelectQuery(query, prefixes).map(ConjunctiveQuery(_)) 55 RDFoxUtil.parseSelectQuery(query, prefixes).map(ConjunctiveQuery(id, _))
55 56
56} 57}
57 58
@@ -66,6 +67,7 @@ object ConjunctiveQuery {
66 * `SelectQuery` to be considered a conjunctive query. 67 * `SelectQuery` to be considered a conjunctive query.
67 */ 68 */
68class ConjunctiveQuery( 69class ConjunctiveQuery(
70 val id: Int,
69 query: SelectQuery, 71 query: SelectQuery,
70 val prefixes: Prefixes = new Prefixes() 72 val prefixes: Prefixes = new Prefixes()
71) { 73) {
@@ -96,37 +98,54 @@ class ConjunctiveQuery(
96 val bcq: Boolean = select.isEmpty && !query.getAllPossibleVariables 98 val bcq: Boolean = select.isEmpty && !query.getAllPossibleVariables
97 99
98 /** Returns the query body as a sequence of atoms (triples). */ 100 /** Returns the query body as a sequence of atoms (triples). */
99 val atoms: List[TupleTableAtom] = 101 def atoms(graph: TupleTableName): List[TupleTableAtom] =
100 where match { 102 where
101 case b: ConjunctionPattern => { 103 .asInstanceOf[ConjunctionPattern]
102 b.getConjuncts.toList.flatMap { conj: QueryPattern => 104 .getConjuncts
103 conj match { 105 .collect { case t: TriplePattern =>
104 case c: TriplePattern => 106 TupleTableAtom.create(graph, t.getSubject, t.getPredicate, t.getObject)
105 Seq(
106 TupleTableAtom.rdf(c.getSubject, c.getPredicate, c.getObject)
107 )
108 case _ => List()
109 }
110 }
111 } 107 }
112 case _ => List() 108 // where match {
113 } 109 // case b: ConjunctionPattern => {
110 // b.getConjuncts.toList.flatMap { conj: QueryPattern =>
111 // conj match {
112 // case c: TriplePattern =>
113 // Seq(
114 // TupleTableAtom.rdf(c.getSubject, c.getPredicate, c.getObject)
115 // )
116 // case _ => List()
117 // }
118 // }
119 // }
120 // case _ => List()
121 // }
114 122
115 /** Returns the full collection of variables involved in the query. */ 123 /** Returns the full collection of variables involved in the query. */
116 val variables: List[Variable] = (where match { 124 val variables: List[Variable] =
117 case b: ConjunctionPattern => { 125 where
118 b.getConjuncts.toList.flatMap { conj: QueryPattern => 126 .asInstanceOf[ConjunctionPattern]
119 conj match { 127 .getConjuncts
120 case c: TriplePattern => 128 .collect { case t: TriplePattern =>
121 Set(c.getSubject, c.getPredicate, c.getObject).collect { 129 Set(t.getSubject, t.getPredicate, t.getObject).collect {
122 case v: Variable => v 130 case v: Variable => v
123 }
124 case _ => List()
125 } 131 }
126 } 132 }
127 } 133 .flatten
128 case _ => List() 134 .distinct
129 }).distinct 135 // (where match {
136 // case b: ConjunctionPattern => {
137 // b.getConjuncts.toList.flatMap { conj: QueryPattern =>
138 // conj match {
139 // case c: TriplePattern =>
140 // Set(c.getSubject, c.getPredicate, c.getObject).collect {
141 // case v: Variable => v
142 // }
143 // case _ => List()
144 // }
145 // }
146 // }
147 // case _ => List()
148 // }).distinct
130 149
131 /** Returns the collection of answer variables in the query. */ 150 /** Returns the collection of answer variables in the query. */
132 val answer: List[Variable] = 151 val answer: List[Variable] =
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala
index 4166655..5b97679 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala
@@ -16,6 +16,7 @@
16 16
17package uk.ac.ox.cs.rsacomb.sparql 17package uk.ac.ox.cs.rsacomb.sparql
18 18
19import ujson._
19import tech.oxfordsemantic.jrdfox.logic.expression.{ 20import tech.oxfordsemantic.jrdfox.logic.expression.{
20 IRI, 21 IRI,
21 Literal, 22 Literal,
@@ -33,19 +34,31 @@ import tech.oxfordsemantic.jrdfox.logic.expression.{
33 * BCQs, and empty collection represents a ''false'', ''true'' otherwise. 34 * BCQs, and empty collection represents a ''false'', ''true'' otherwise.
34 */ 35 */
35class ConjunctiveQueryAnswers( 36class ConjunctiveQueryAnswers(
36 bcq: Boolean, 37 val query: ConjunctiveQuery,
37 val variables: Seq[Variable], 38 val variables: Seq[Variable],
38 val answers: Seq[(Long, Seq[Resource])] 39 val answers: Seq[(Long, Seq[Resource])]
39) { 40) {
40 41
41 /** Returns number of distinct answers. */ 42 /** Returns number of distinct answers. */
42 val length: Int = if (bcq) 0 else answers.length 43 val length: Int = if (query.bcq) 0 else answers.length
43 44
44 /** Returns number of answers taking into account multiplicity. */ 45 /** Returns number of answers taking into account multiplicity. */
45 val lengthWithMultiplicity: Long = answers.map(_._1).sum 46 val lengthWithMultiplicity: Long = answers.map(_._1).sum
46 47
48 /** Serialise answers as JSON file */
49 def toJSON(): ujson.Js.Value =
50 ujson.Obj(
51 "queryID" -> query.id,
52 "queryText" -> query.toString
53 .split('\n')
54 .map(_.trim.filter(_ >= ' '))
55 .mkString(" "),
56 "answerVariables" -> ujson.Arr(query.answer.map(_.toString())),
57 "answers" -> ujson.Arr(answers.map(_._2.mkString(" ")).sorted)
58 )
59
47 override def toString(): String = 60 override def toString(): String =
48 if (bcq) { 61 if (query.bcq) {
49 if (answers.isEmpty) "FALSE" else "TRUE" 62 if (answers.isEmpty) "FALSE" else "TRUE"
50 } else { 63 } else {
51 if (answers.isEmpty) 64 if (answers.isEmpty)
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/suffix/RSASuffix.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/suffix/RSASuffix.scala
index 424f2a0..282aa0b 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/suffix/RSASuffix.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/suffix/RSASuffix.scala
@@ -16,13 +16,13 @@
16 16
17package uk.ac.ox.cs.rsacomb.suffix 17package uk.ac.ox.cs.rsacomb.suffix
18 18
19import org.semanticweb.owlapi.model.{ 19// import org.semanticweb.owlapi.model.{
20 OWLPropertyExpression, 20// OWLPropertyExpression,
21 OWLObjectInverseOf, 21// OWLObjectInverseOf,
22 OWLObjectProperty 22// OWLObjectProperty
23} 23// }
24 24
25import tech.oxfordsemantic.jrdfox.logic.expression.{IRI} 25import tech.oxfordsemantic.jrdfox.logic.expression.{IRI, Term}
26import tech.oxfordsemantic.jrdfox.logic.datalog.{TupleTableAtom, TupleTableName} 26import tech.oxfordsemantic.jrdfox.logic.datalog.{TupleTableAtom, TupleTableName}
27 27
28object RSASuffix { 28object RSASuffix {
@@ -37,7 +37,17 @@ class RSASuffix(val suffix: String => String) {
37 new RSASuffix(this.suffix andThen that.suffix) 37 new RSASuffix(this.suffix andThen that.suffix)
38 38
39 def ::(str: String): String = this suffix str 39 def ::(str: String): String = this suffix str
40 40 def ::(iri: IRI): IRI = IRI.create(this suffix iri.getIRI)
41 def ::(tta: TupleTableAtom): TupleTableAtom = {
42 val ttn: TupleTableName = tta.getTupleTableName
43 tta.getArguments match {
44 case List(subj: Term, IRI.RDF_TYPE, obj: IRI) =>
45 TupleTableAtom.create(ttn, subj, IRI.RDF_TYPE, obj :: this)
46 case List(subj: Term, pred: IRI, obj: Term) =>
47 TupleTableAtom.create(ttn, subj, pred :: this, obj)
48 case _ => tta
49 }
50 }
41} 51}
42 52
43case object Empty extends RSASuffix(identity) 53case object Empty extends RSASuffix(identity)
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/DataFactory.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/DataFactory.scala
new file mode 100644
index 0000000..848c6b5
--- /dev/null
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/DataFactory.scala
@@ -0,0 +1,29 @@
1package uk.ac.ox.cs.rsacomb.util
2
3import org.semanticweb.owlapi.apibinding.OWLManager
4import org.semanticweb.owlapi.model.OWLClass
5import tech.oxfordsemantic.jrdfox.logic.expression.Variable
6
7/** Simple fresh variable/class generator */
8object DataFactory {
9
10 /** Manager instance to interface with OWLAPI */
11 private val manager = OWLManager.createOWLOntologyManager()
12 private val factory = manager.getOWLDataFactory()
13
14 def apply(counter: Integer = -1): DataFactory = new DataFactory(counter)
15}
16
17class DataFactory(private var counter: Integer) {
18
19 private def getNext(): Integer = {
20 counter += 1
21 counter
22 }
23
24 def getVariable(): Variable =
25 Variable.create(f"I${this.getNext()}%05d")
26
27 def getOWLClass(): OWLClass =
28 DataFactory.factory.getOWLClass(s"X${this.getNext()}")
29}
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
index bcb1445..a55b5a0 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
@@ -25,6 +25,10 @@ import java.io.PrintStream
25 */ 25 */
26object Logger { 26object Logger {
27 27
28 private val time = Calendar.getInstance()
29
30 private lazy val dir = os.temp.dir(os.pwd, "rsacomb-", false)
31
28 /** Output stream for the logger. */ 32 /** Output stream for the logger. */
29 var output: PrintStream = System.out 33 var output: PrintStream = System.out
30 34
@@ -34,7 +38,7 @@ object Logger {
34 def compare(that: Level) = this.level - that.level 38 def compare(that: Level) = this.level - that.level
35 override def toString = name 39 override def toString = name
36 } 40 }
37 case object QUIET extends Level(0, "normal") 41 case object QUIET extends Level(0, "quiet")
38 case object NORMAL extends Level(1, "normal") 42 case object NORMAL extends Level(1, "normal")
39 case object DEBUG extends Level(2, "debug") 43 case object DEBUG extends Level(2, "debug")
40 case object VERBOSE extends Level(3, "verbose") 44 case object VERBOSE extends Level(3, "verbose")
@@ -42,12 +46,13 @@ object Logger {
42 /** Currend logger level */ 46 /** Currend logger level */
43 var level: Level = DEBUG 47 var level: Level = DEBUG
44 48
45 def print(str: Any, lvl: Level = NORMAL): Unit = { 49 def print(str: Any, lvl: Level = NORMAL): Unit =
46 if (lvl <= level) { 50 if (lvl <= level)
47 val time = Calendar.getInstance.getTime 51 output println s"[$lvl][${time.getTime}] $str"
48 output println s"[$lvl][$time] $str" 52
49 } 53 def write(content: => os.Source, file: String, lvl: Level = VERBOSE): Unit =
50 } 54 if (lvl <= level)
55 os.write.append(dir / file, content)
51 56
52 def timed[A](expr: => A, desc: String = "", lvl: Level = NORMAL): A = { 57 def timed[A](expr: => A, desc: String = "", lvl: Level = NORMAL): A = {
53 val t0 = System.currentTimeMillis() 58 val t0 = System.currentTimeMillis()
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala
index a9027cf..e3e7dd4 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala
@@ -17,6 +17,7 @@
17package uk.ac.ox.cs.rsacomb.util 17package uk.ac.ox.cs.rsacomb.util
18 18
19import java.io.{OutputStream, File, StringReader} 19import java.io.{OutputStream, File, StringReader}
20import scala.collection.JavaConverters._
20import tech.oxfordsemantic.jrdfox.Prefixes 21import tech.oxfordsemantic.jrdfox.Prefixes
21import tech.oxfordsemantic.jrdfox.client.{ 22import tech.oxfordsemantic.jrdfox.client.{
22 ComponentInfo, 23 ComponentInfo,
@@ -38,9 +39,11 @@ import tech.oxfordsemantic.jrdfox.logic.expression.{
38 Literal, 39 Literal,
39 Resource, 40 Resource,
40 Variable, 41 Variable,
41 Term 42 Term,
43 IRI
42} 44}
43import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery 45import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery
46import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery
44import uk.ac.ox.cs.rsacomb.suffix.Nth 47import uk.ac.ox.cs.rsacomb.suffix.Nth
45import uk.ac.ox.cs.rsacomb.util.Logger 48import uk.ac.ox.cs.rsacomb.util.Logger
46 49
@@ -84,12 +87,29 @@ object RDFoxUtil {
84 val password = "" 87 val password = ""
85 val server = 88 val server =
86 ConnectionFactory.newServerConnection(serverUrl, role, password) 89 ConnectionFactory.newServerConnection(serverUrl, role, password)
90 opts.put("type", "par-complex-nn")
87 if (!server.containsDataStore(datastore)) 91 if (!server.containsDataStore(datastore))
88 server.createDataStore(datastore, "par-complex-nn", opts) 92 server.createDataStore(datastore, opts)
89 val data = server.newDataStoreConnection(datastore) 93 val data = server.newDataStoreConnection(datastore)
90 (server, data) 94 (server, data)
91 } 95 }
92 96
97 /** Get the IRI of a named graph (creating it if necessary)
98 *
99 * @param datastore name of the datastore to perform the action in.
100 * @param name name of the named graph.
101 *
102 * @return the full IRI for the (new) named graph.
103 */
104 def getNamedGraph(datastore: String, name: String): IRI = {
105 val graph = RSA(name)
106 val (server, data) = openConnection(datastore)
107 if (!data.containsTupleTable(graph.getIRI))
108 data.createTupleTable(graph.getIRI, Map("type" -> "named-graph").asJava)
109 RDFoxUtil.closeConnection(server, data)
110 return graph
111 }
112
93 /** Create a built-in `rdfox:SKOLEM` TupleTableAtom. */ 113 /** Create a built-in `rdfox:SKOLEM` TupleTableAtom. */
94 def skolem(name: String, terms: Term*): TupleTableAtom = 114 def skolem(name: String, terms: Term*): TupleTableAtom =
95 TupleTableAtom.create( 115 TupleTableAtom.create(
@@ -122,13 +142,14 @@ object RDFoxUtil {
122 def addRules(data: DataStoreConnection, rules: Seq[Rule]): Unit = 142 def addRules(data: DataStoreConnection, rules: Seq[Rule]): Unit =
123 Logger.timed( 143 Logger.timed(
124 if (rules.length > 0) { 144 if (rules.length > 0) {
125 data.importData( 145 data addRules rules
126 UpdateType.ADDITION, 146 // data.importData(
127 RSA.Prefixes, 147 // UpdateType.ADDITION,
128 rules 148 // RSA.Prefixes,
129 .map(_.toString(Prefixes.s_emptyPrefixes)) 149 // rules
130 .mkString("\n") 150 // .map(_.toString(Prefixes.s_emptyPrefixes))
131 ) 151 // .mkString("\n")
152 // )
132 }, 153 },
133 s"Loading ${rules.length} rules", 154 s"Loading ${rules.length} rules",
134 Logger.DEBUG 155 Logger.DEBUG
@@ -139,10 +160,15 @@ object RDFoxUtil {
139 * @param data datastore connection 160 * @param data datastore connection
140 * @param facts collection of facts to be added to the data store 161 * @param facts collection of facts to be added to the data store
141 */ 162 */
142 def addFacts(data: DataStoreConnection, facts: Seq[TupleTableAtom]): Unit = 163 def addFacts(
164 data: DataStoreConnection,
165 graph: IRI,
166 facts: Seq[TupleTableAtom]
167 ): Unit =
143 Logger.timed( 168 Logger.timed(
144 if (facts.length > 0) { 169 if (facts.length > 0) {
145 data.importData( 170 data.importData(
171 graph.getIRI,
146 UpdateType.ADDITION, 172 UpdateType.ADDITION,
147 RSA.Prefixes, 173 RSA.Prefixes,
148 facts 174 facts
@@ -157,15 +183,17 @@ object RDFoxUtil {
157 /** Imports a sequence of files directly into a datastore. 183 /** Imports a sequence of files directly into a datastore.
158 * 184 *
159 * @param data datastore connection. 185 * @param data datastore connection.
186 * @param graph named graph where the data should be uploaded
160 * @param files sequence of files to upload. 187 * @param files sequence of files to upload.
161 */ 188 */
162 def addData(data: DataStoreConnection, files: File*): Unit = 189 def addData(data: DataStoreConnection, graph: IRI, files: os.Path*): Unit =
163 Logger.timed( 190 Logger.timed(
164 files.foreach { 191 files.foreach { path =>
165 data.importData( 192 data.importData(
193 graph.getIRI,
166 UpdateType.ADDITION, 194 UpdateType.ADDITION,
167 RSA.Prefixes, 195 RSA.Prefixes,
168 _ 196 path.toIO
169 ) 197 )
170 }, 198 },
171 "Loading data files", 199 "Loading data files",
@@ -176,15 +204,6 @@ object RDFoxUtil {
176 def materialize(data: DataStoreConnection): Unit = 204 def materialize(data: DataStoreConnection): Unit =
177 Logger.timed(data.updateMaterialization(), "Materialization", Logger.DEBUG) 205 Logger.timed(data.updateMaterialization(), "Materialization", Logger.DEBUG)
178 206
179 /** Load SPARQL query from file. */
180 def loadQueryFromFile(file: File): String = {
181 val source = io.Source.fromFile(file)
182 val query = source.getLines mkString "\n"
183 Logger print s"Loaded query:\n$query"
184 source.close()
185 query
186 }
187
188 /** Export data in `text/turtle`. 207 /** Export data in `text/turtle`.
189 * 208 *
190 * @param data datastore connection from which to export data. 209 * @param data datastore connection from which to export data.
@@ -205,6 +224,50 @@ object RDFoxUtil {
205 ) 224 )
206 } 225 }
207 226
227 /** Load SPARQL queries from file.
228 *
 229 * The file can list multiple queries, each preceded with a
230 * single line containing "#^[Query<id>]" where "<id>" is a number.
231 * Empty lines are ignored.
232 *
233 * @note if a query is not recognized as a [[SelectQuery]] by RDFox
234 * it is discarded.
235 *
236 * @param file file containing a list of conjunctive queries.
237 * @param prefixes additional prefixes for the query. It defaults to
238 * an empty set.
239 *
240 * @return a list of [[tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery SelectQuery]] queries.
241 */
242 def loadQueriesFromFile(
243 path: os.Path,
244 prefixes: Prefixes = new Prefixes()
245 ): List[ConjunctiveQuery] = {
246 val header = raw"\^\[[Qq]uery(\d+)\]".r
247 val comment = "^#.*".r
248 val queries = os.read
249 .lines(path)
250 .map(_.trim.filter(_ >= ' '))
251 .filterNot(_ == "")
252 .foldRight((List.empty[Option[ConjunctiveQuery]], List.empty[String])) {
253 case (line, (acc, query)) => {
254 line match {
255 case header(id) => {
256 val cq =
257 ConjunctiveQuery.parse(id.toInt, query.mkString(" "), prefixes)
258 (cq :: acc, List.empty)
259 }
260 case comment() => (acc, query)
261 case _ => (acc, line :: query)
262 }
263 }
264 }
265 ._1
266 .collect { case Some(q) => q }
267 Logger print s"Loaded ${queries.length} queries from $path"
268 queries
269 }
270
208 /** Parse a SELECT query from a string in SPARQL format. 271 /** Parse a SELECT query from a string in SPARQL format.
209 * 272 *
210 * @param query the string containing the SPARQL query 273 * @param query the string containing the SPARQL query
@@ -282,12 +345,14 @@ object RDFoxUtil {
282 * compatible with RDFox engine. This helper allows to build a query 345 * compatible with RDFox engine. This helper allows to build a query
283 * to gather all instances of an internal predicate 346 * to gather all instances of an internal predicate
284 * 347 *
348 * @param graph named graph to query for the provided predicate
285 * @param pred name of the predicate to describe. 349 * @param pred name of the predicate to describe.
286 * @param arity arity of the predicate. 350 * @param arity arity of the predicate.
287 * @return a string containing a SPARQL query. 351 * @return a string containing a SPARQL query.
288 */ 352 */
289 def buildDescriptionQuery( 353 def buildDescriptionQuery(
290 pred: String, 354 graph: IRI,
355 pred: IRI,
291 arity: Int 356 arity: Int
292 ): String = { 357 ): String = {
293 if (arity > 0) { 358 if (arity > 0) {
@@ -295,55 +360,12 @@ object RDFoxUtil {
295 s""" 360 s"""
296 SELECT $variables 361 SELECT $variables
297 WHERE { 362 WHERE {
298 ?K a rsa:$pred. 363 GRAPH $graph { ?K a $pred }.
299 TT <http://oxfordsemantic.tech/RDFox#SKOLEM> { $variables ?K } . 364 TT ${TupleTableName.SKOLEM} { $variables ?K } .
300 } 365 }
301 """ 366 """
302 } else { 367 } else {
303 "ASK { ?X a rsa:Ans }" 368 s"ASK { GRAPH $graph { ?X a $pred } }"
304 }
305 }
306
307 /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.Rule Rule]].
308 *
309 * This is needed because RDFox supports only predicates of arity 1
310 * or 2, but the filtering program uses predicates with higher arity.
311 *
312 * @note we can perform a reification of the atoms thanks to the
 313 * built-in `SKOLEM` function of RDFox.
314 */
315 def reify(rule: Rule): Rule = {
316 val (sk, as) = rule.getHead.map(_.reified).unzip
317 val head: List[TupleTableAtom] = as.flatten
318 val skolem: List[BodyFormula] = sk.flatten
319 val body: List[BodyFormula] = rule.getBody.map(reify).flatten
320 Rule.create(head, skolem ::: body)
321 }
322
323 /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.BodyFormula BodyFormula]].
324 *
325 * This is needed because RDFox supports only predicates of arity 1
326 * or 2, but the filtering program uses predicates with higher arity.
327 *
328 * @note we can perform a reification of the atoms thanks to the
 329 * built-in `SKOLEM` function of RDFox.
330 */
331 private def reify(formula: BodyFormula): List[BodyFormula] = {
332 formula match {
333 case atom: TupleTableAtom => atom.reified._2
334 case neg: Negation => {
335 val (sk, as) = neg.getNegatedAtoms
336 .map({
337 case a: TupleTableAtom => a.reified
338 case a => (None, List(a))
339 })
340 .unzip
341 val skolem =
342 sk.flatten.map(_.getArguments.last).collect { case v: Variable => v }
343 val atoms = as.flatten
344 List(Negation.create(skolem, atoms))
345 }
346 case other => List(other)
347 } 369 }
348 } 370 }
349 371
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala
index 8b341ba..5abb83c 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala
@@ -42,15 +42,51 @@ import scala.collection.JavaConverters._
42 42
43object RSA { 43object RSA {
44 44
45 /** Simplify conversion between Java and Scala `List`s */
46 import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._
47
48 /** Set of default prefixes to be included in all datastore operations */
45 val Prefixes: Prefixes = new Prefixes() 49 val Prefixes: Prefixes = new Prefixes()
46 Prefixes.declarePrefix("rsa:", "http://www.cs.ox.ac.uk/isg/rsa/") 50 Prefixes.declarePrefix("rsacomb:", "http://www.cs.ox.ac.uk/isg/RSAComb#")
51 Prefixes.declarePrefix("rdfox:", "http://oxfordsemantic.tech/RDFox#")
47 Prefixes.declarePrefix("owl:", "http://www.w3.org/2002/07/owl#") 52 Prefixes.declarePrefix("owl:", "http://www.w3.org/2002/07/owl#")
48 53
54 /** Creates a `rsacomb:<name>` IRI */
55 def apply(name: Any): IRI =
56 IRI.create(
57 //Prefixes.decodeIRI("rsacomb:") + name.toString
58 Prefixes.getPrefixIRIsByPrefixName.get("rsacomb:").getIRI + name.toString
59 )
60
61 /** Helper IRIs */
62 val ANS = RSA("Ans")
63 val AQ = RSA("AQ")
49 val CONGRUENT = RSA("congruent") 64 val CONGRUENT = RSA("congruent")
65 val FK = RSA("FK")
66 val ID = RSA("ID")
67 val IN = RSA("In")
50 val NAMED = RSA("Named") 68 val NAMED = RSA("Named")
69 val NI = RSA("NI")
70 val QM = RSA("QM")
71 val SP = RSA("SP")
72 val TQ = RSA("TQ")
73
74 def Named(tt: TupleTableName)(x: Term): TupleTableAtom =
75 TupleTableAtom.create(tt, x, IRI.RDF_TYPE, RSA.NAMED)
76 def Congruent(tt: TupleTableName)(x: Term, y: Term): TupleTableAtom =
77 TupleTableAtom.create(tt, x, RSA.CONGRUENT, y)
78 def Skolem(skolem: Term, terms: List[Term]): TupleTableAtom =
79 TupleTableAtom.create(TupleTableName.SKOLEM, terms :+ skolem)
80
81 // def In(t: Term)(implicit set: Term) =
82 // TupleTableAtom.rdf(t, RSA("In"), set)
51 83
52 private def atom(name: IRI, vars: List[Term]): TupleTableAtom = 84 // def NotIn(t: Term)(implicit set: Term) = Negation.create(In(t)(set))
53 TupleTableAtom.create(TupleTableName.create(name.getIRI), vars: _*) 85
86 /* TODO: review after reworking the dependency graph construction */
87
88 // private def atom(name: IRI, vars: List[Term]): TupleTableAtom =
89 // TupleTableAtom.create(TupleTableName.create(name.getIRI), vars: _*)
54 90
55 def E(t1: Term, t2: Term) = 91 def E(t1: Term, t2: Term) =
56 TupleTableAtom.rdf(t1, RSA("E"), t2) 92 TupleTableAtom.rdf(t1, RSA("E"), t2)
@@ -61,51 +97,4 @@ object RSA {
61 def U(t: Term) = 97 def U(t: Term) =
62 TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("U")) 98 TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("U"))
63 99
64 def In(t: Term)(implicit set: Term) =
65 TupleTableAtom.rdf(t, RSA("In"), set)
66
67 def NotIn(t: Term)(implicit set: Term) = Negation.create(In(t)(set))
68
69 def Congruent(t1: Term, t2: Term) =
70 TupleTableAtom.rdf(t1, RSA("congruent"), t2)
71
72 def QM(implicit q: ConjunctiveQuery) =
73 atom(RSA("QM"), q.answer ::: q.bounded)
74
75 def ID(t1: Term, t2: Term)(implicit q: ConjunctiveQuery) = {
76 atom(RSA("ID"), (q.answer ::: q.bounded) :+ t1 :+ t2)
77 }
78
79 def Named(t: Term) =
80 TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("Named"))
81
82 def Thing(t: Term) =
83 TupleTableAtom.rdf(t, IRI.RDF_TYPE, IRI.THING)
84
85 def NI(t: Term) =
86 TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("NI"))
87
88 def TQ(t1: Term, t2: Term, sx: RSASuffix)(implicit q: ConjunctiveQuery) =
89 atom(RSA("TQ" :: sx), (q.answer ::: q.bounded) :+ t1 :+ t2)
90
91 def AQ(t1: Term, t2: Term, sx: RSASuffix)(implicit q: ConjunctiveQuery) =
92 atom(RSA("AQ" :: sx), (q.answer ::: q.bounded) :+ t1 :+ t2)
93
94 def FK(implicit q: ConjunctiveQuery) =
95 atom(RSA("FK"), q.answer ::: q.bounded)
96
97 def SP(implicit q: ConjunctiveQuery) =
98 atom(RSA("SP"), q.answer ::: q.bounded)
99
100 def Ans(implicit q: ConjunctiveQuery) = {
101 if (q.bcq)
102 TupleTableAtom.rdf(RSA("blank"), IRI.RDF_TYPE, RSA("Ans"))
103 else
104 atom(RSA("Ans"), q.answer)
105 }
106
107 def apply(name: Any): IRI =
108 IRI.create(
109 Prefixes.getPrefixIRIsByPrefixName.get("rsa:").getIRI + name.toString
110 )
111} 100}