author     Federico Igne <federico.igne@cs.ox.ac.uk>  2021-04-09 12:48:17 +0100
committer  Federico Igne <federico.igne@cs.ox.ac.uk>  2021-04-09 12:48:17 +0100
commit     e6048bd85da50a8f40538b968fe7ac3b957fdd97 (patch)
tree       ad893fdac19b84d0f1d8603527616a2c89dd54c6 /src/main/scala/uk/ac
parent     6f5c82982248e823f2dd6f9eaf87f552d1616ca4 (diff)
download   RSAComb-e6048bd85da50a8f40538b968fe7ac3b957fdd97.tar.gz
           RSAComb-e6048bd85da50a8f40538b968fe7ac3b957fdd97.zip
Streamline RSA approximation
Diffstat (limited to 'src/main/scala/uk/ac')
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala                      | 100
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala               | 182
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala      |   6
-rw-r--r--  src/main/scala/uk/ac/ox/cs/rsacomb/implicits/JavaCollections.scala |   6
4 files changed, 132 insertions, 162 deletions
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
index 2e7f586..ed491a9 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
@@ -29,11 +29,9 @@ object RSAConfig {
       -h | -? | --help
           print this help message
 
-      --rsacheck-only
-          only perform the RSA check without performing any query answering.
-
       -q <file> | --query <file>
-          path to a file containing a single SPARQL query
+          path to a file containing a single SPARQL query. If no query
+          is provided, only the approximation to RSA will be performed.
 
       <ontology>
           file containing the ontology
@@ -44,9 +42,7 @@ object RSAConfig {
44 """ 42 """
45 43
46 /** Default config values */ 44 /** Default config values */
47 private val default = Map( 45 private val default: Config = Map.empty
48 'rsacheckonly -> RSAOption[Boolean](false)
49 )
50 46
51 /** Utility to exit the program with a custom message on stderr. 47 /** Utility to exit the program with a custom message on stderr.
52 * 48 *
@@ -79,8 +75,6 @@ object RSAConfig {
         println(help)
         sys.exit(0)
       }
-      case "--rsacheck-only" :: tail =>
-        parse(tail, config ++ Map('rsacheckonly -> true))
       case flag @ ("-q" | "--query") :: _query :: tail => {
         val query = new File(_query)
         if (!query.isFile)
@@ -101,80 +95,34 @@ object RSAConfig {
   }
 
   /** Perform final checks on parsed options */
-  private def finalise(config: Config): Config = {
-    // Query file is mandatory unless only the RSA check is required.
-    if (!config('rsacheckonly).get[Boolean] && !config.contains('query))
-      exit(s"Query file was not provided.")
-
-    config
-  }
+  private def finalise(config: Config): Config = config
 }
 
-/** Entry point of the program.
- *
- * The executable expects a SPARQL query and a non-empty sequence of
- * ontology files as arguments. The query file is expected to contain
- * exactly one query, while the ontology files will be programmatically
- * merged in a single ontology.
- *
- * @todo better arguments handling is needed. Look into some library
- *   for this.
- * @todo at the moment the input ontology is assumed to be Horn-ALCHOIQ.
- *   This might not be the case.
- */
+/** Main entry point to the program */
 object RSAComb extends App {
 
+  /* Command-line options */
   val config = RSAConfig.parse(args.toList)
 
-  val ontology =
-    RSAOntology(config('ontology).get[File], config('data).get[List[File]]: _*)
-
-  if (ontology.isRSA) {
-
-    Logger print "Ontology is RSA!"
+  val ontology = RSAOntology(
+    config('ontology).get[File],
+    config('data).get[List[File]]: _*
+  )
+  val rsa = ontology.toRSA()
+  ontology.statistics()
 
-    if (!config('rsacheckonly).get[Boolean]) {
+  if (config contains 'query) {
     val query =
       RDFoxUtil.loadQueryFromFile(config('query).get[File].getAbsoluteFile)
 
     ConjunctiveQuery.parse(query) match {
       case Some(query) => {
-        val answers = ontology ask query
-        //Logger.print(s"$answers", Logger.QUIET)
+        val answers = rsa ask query
+        Logger.print(s"$answers", Logger.VERBOSE)
         Logger print s"Number of answers: ${answers.length} (${answers.lengthWithMultiplicity})"
-
-        // /* Additional DEBUG information */
-        // if (Logger.level >= Logger.DEBUG) {
-        //   /* Unfiltered rules */
-        //   val unfiltered = ontology askUnfiltered query
-        //   unfiltered map { u =>
-        //     Logger print s"Number of unfiltered answers: ${u.length} (${u.map(_._1).sum})."
-
-        //     /* Spurious answers */
-        //     val spurious = {
-        //       val variables = query.variables.length
-        //       val sp = RDFoxUtil.buildDescriptionQuery("SP", variables)
-        //       ontology.queryDataStore(query, sp, RSA.Prefixes)
-        //     }
-        //     spurious map { s =>
-        //       Logger print s"Number of spurious answers: ${s.length} (${s.map(_._1).sum})"
-
-        //       /* Spurious/unfiltered percentage */
-        //       val perc =
-        //         if (u.length > 0) (s.length / u.length.toFloat) * 100 else 0
-        //       Logger print s"Percentage of spurious answers: $perc%"
-        //     }
-        //   }
-        // }
-        }
-        case None =>
-          throw new RuntimeException("Submitted query is not conjunctive")
       }
+      case None =>
+        throw new RuntimeException("Submitted query is not conjunctive")
     }
-
-  } else {
-
-    Logger print "Ontology is not RSA!"
-
   }
 }
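
Note: with this commit the entry point no longer bails out when the ontology is not already RSA; it always computes an RSA approximation via toRSA() and only runs query answering when a query file is passed on the command line. A minimal sketch of the new flow, assuming the package layout visible in this commit (the ConjunctiveQuery and RDFoxUtil import paths and the file names are placeholders, not confirmed by this diff):

    import java.io.File
    import uk.ac.ox.cs.rsacomb.RSAOntology
    // Import paths below are assumed from the imports shown in this commit
    import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery
    import uk.ac.ox.cs.rsacomb.util.RDFoxUtil

    object Example extends App {
      // Load and normalize the input ontology (placeholder file names)
      val ontology = RSAOntology(new File("ontology.owl"), new File("data.ttl"))
      // Approximate to RSA and report what was dropped along the way
      val rsa = ontology.toRSA()
      ontology.statistics()
      // Query answering is optional and runs against the approximated ontology
      val query = RDFoxUtil.loadQueryFromFile(new File("query.sparql").getAbsoluteFile)
      ConjunctiveQuery.parse(query) match {
        case Some(cq) => println(s"Number of answers: ${(rsa ask cq).length}")
        case None     => println("Submitted query is not conjunctive")
      }
    }
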
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
index 2e055b9..c5a2730 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
@@ -66,66 +66,67 @@ import uk.ac.ox.cs.rsacomb.util.Logger
 
 object RSAOntology {
 
-  // Counter used to implement a simple fresh variable generator
-  private var counter = -1;
-
   /** Name of the RDFox data store used for CQ answering */
   private val DataStore = "answer_computation"
 
-  def apply(ontology: File, data: File*): RSAOntology =
-    new RSAOntology(ontology, data: _*)
-
+  /** Simple fresh variable generator */
+  private var counter = -1;
   def genFreshVariable(): Variable = {
     counter += 1
-    Variable.create(f"I$counter%03d")
+    Variable.create(f"I$counter%05d")
   }
 
+  /** Manager instance to interface with OWLAPI */
+  val manager = OWLManager.createOWLOntologyManager()
+
+  def apply(ontology: File, data: File*): RSAOntology =
+    new RSAOntology(
+      manager.loadOntologyFromOntologyDocument(ontology),
+      data: _*
+    )
+
+  def apply(ontology: OWLOntology, data: File*): RSAOntology =
+    new RSAOntology(ontology, data: _*)
 }
 
-class RSAOntology(_ontology: File, val datafiles: File*) {
+/** Wrapper class for an ontology in RSA
+ *
+ * @param ontology the input OWL2 ontology.
+ * @param datafiles additinal data (treated as part of the ABox)
+ */
+class RSAOntology(val original: OWLOntology, val datafiles: File*) {
 
   /** Simplify conversion between OWLAPI and RDFox concepts */
   import implicits.RDFox._
   import uk.ac.ox.cs.rsacomb.implicits.RSAAxiom._
   import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._
 
-  /** Manager instance to interface with OWLAPI */
-  private val manager = OWLManager.createOWLOntologyManager()
-
-  /** TBox + RBox of the input knowledge base. */
-  val ontology: OWLOntology =
-    manager.loadOntologyFromOntologyDocument(_ontology)
-
-  /** OWLAPI internal reasoner some preliminary reasoning task. */
-  private val reasoner =
-    (new StructuralReasonerFactory()).createReasoner(ontology)
+  /** Set of axioms removed during the approximation to RSA */
+  private var removed: Seq[OWLAxiom] = Seq.empty
 
+  /** The normalizer normalizes the ontology and approximate it to
+   * Horn-ALCHOIQ. A further step is needed to obtain an RSA
+   * approximation of the input ontology `original`.
+   */
   private val normalizer = new Normalizer()
 
-  /** Imported knowledge base. */
-  //lazy val kbase: OWLOntology = {
-  //  val merger = new OWLOntologyMerger(manager)
-  //  _data.foreach { manager.loadOntologyFromOntologyDocument(_) }
-  //  merger.createMergedOntology(manager, OWLIRI.create("_:merged"))
-  //}
-
   /** TBox axioms */
-  val tbox: List[OWLLogicalAxiom] =
-    ontology
+  var tbox: List[OWLLogicalAxiom] =
+    original
       .tboxAxioms(Imports.INCLUDED)
       .collect(Collectors.toList())
       .collect { case a: OWLLogicalAxiom => a }
       .flatMap(normalizer.normalize)
-  Logger.print(s"Original TBox: ${tbox.length}", Logger.DEBUG)
+  //Logger.print(s"Normalized TBox: ${tbox.length}", Logger.DEBUG)
 
   /** RBox axioms */
-  val rbox: List[OWLLogicalAxiom] =
-    ontology
+  var rbox: List[OWLLogicalAxiom] =
+    original
       .rboxAxioms(Imports.INCLUDED)
       .collect(Collectors.toList())
       .collect { case a: OWLLogicalAxiom => a }
       .flatMap(normalizer.normalize)
-  Logger.print(s"Original RBox: ${rbox.length}", Logger.DEBUG)
+  //Logger.print(s"Normalized RBox: ${rbox.length}", Logger.DEBUG)
 
   /** ABox axioms
    *
@@ -134,18 +135,27 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
    * imported in RDFox due to performance issues when trying to import
    * large data files via OWLAPI.
    */
-  val abox: List[OWLLogicalAxiom] =
-    ontology
+  var abox: List[OWLLogicalAxiom] =
+    original
       .aboxAxioms(Imports.INCLUDED)
       .collect(Collectors.toList())
       .collect { case a: OWLLogicalAxiom => a }
       .flatMap(normalizer.normalize)
-  Logger.print(s"Original ABox: ${abox.length}", Logger.DEBUG)
+  //Logger.print(s"Normalized ABox: ${abox.length}", Logger.DEBUG)
 
   /** Collection of logical axioms in the input ontology */
-  lazy val axioms: List[OWLLogicalAxiom] = abox ::: tbox ::: rbox
+  var axioms: List[OWLLogicalAxiom] = abox ::: tbox ::: rbox
 
-  /* Retrieve individuals in the original ontology */
+  /** Normalized Horn-ALCHOIQ ontology */
+  val ontology = RSAOntology.manager.createOntology(
+    axioms.asInstanceOf[List[OWLAxiom]].asJava
+  )
+
+  /** OWLAPI internal reasoner instantiated over the approximated ontology */
+  private val reasoner =
+    (new StructuralReasonerFactory()).createReasoner(ontology)
+
+  /** Retrieve individuals/literals in the ontology */
   val individuals: List[IRI] =
     ontology
       .getIndividualsInSignature()
@@ -153,18 +163,15 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
       .map(_.getIRI)
       .map(implicits.RDFox.owlapiToRdfoxIri)
       .toList
-
   val literals: List[Literal] =
     axioms
       .collect { case a: OWLDataPropertyAssertionAxiom => a }
       .map(_.getObject)
       .map(implicits.RDFox.owlapiToRdfoxLiteral)
 
+  /** Retrieve concepts/roles in the ontology */
   val concepts: List[OWLClass] =
     ontology.getClassesInSignature().asScala.toList
-
-  // This is needed in the computation of rules in the canonical model.
-  // Can we avoid this using RDFox built-in functions?
   val roles: List[OWLObjectPropertyExpression] =
     (tbox ++ rbox)
       .flatMap(_.objectPropertyExpressionsInSignature)
@@ -182,26 +189,19 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
    * if there exists a role p2 appearing in an axiom of type T4 and
    * p1 is a subproperty of either p2 or the inverse of p2.
    */
-  lazy val unsafeRoles: List[OWLObjectPropertyExpression] = {
-
-    /* DEBUG: print rules in DL syntax */
-    //val renderer = new DLSyntaxObjectRenderer()
+  val unsafeRoles: List[OWLObjectPropertyExpression] = {
 
     /* Checking for unsafety condition (1) */
     val unsafe1 = for {
       axiom <- tbox
       if axiom.isT5
       role1 <- axiom.objectPropertyExpressionsInSignature
-      roleSuper =
-        role1 +: reasoner
-          .superObjectProperties(role1)
-          .collect(Collectors.toList())
-          .asScala
+      roleSuper = role1 +: reasoner.superObjectProperties(role1)
       roleSuperInv = roleSuper.map(_.getInverseProperty)
       axiom <- tbox
       if axiom.isT3 && !axiom.isT3top
       role2 <- axiom.objectPropertyExpressionsInSignature
-      if roleSuperInv.contains(role2)
+      if roleSuperInv contains role2
     } yield role1
 
     /* Checking for unsafety condition (2) */
@@ -209,11 +209,7 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
       axiom <- tbox
       if axiom.isT5
       role1 <- axiom.objectPropertyExpressionsInSignature
-      roleSuper =
-        role1 +: reasoner
-          .superObjectProperties(role1)
-          .collect(Collectors.toList())
-          .asScala
+      roleSuper = role1 +: reasoner.superObjectProperties(role1)
       roleSuperInv = roleSuper.map(_.getInverseProperty)
       axiom <- tbox
       if axiom.isT4
@@ -221,16 +217,19 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
       if roleSuper.contains(role2) || roleSuperInv.contains(role2)
     } yield role1
 
-    (unsafe1 ++ unsafe2).toList
+    unsafe1 ++ unsafe2
   }
 
-  /** RSA dependency graph
+  /** Compute the RSA dependency graph
+   *
+   * This is used to approximate the input ontology to RSA.
    *
-   * This is used to check whether the input ontology is RSA. This also
-   * helps us determine a suitable approximation of the ontology to
-   * RSA.
+   * @return a tuple containing the dependency graph and a map between
+   * the constants newly introduced and the corresponding axioms in the
+   * ontology.
    */
-  private lazy val dependencyGraph: Graph[Resource, DiEdge] = {
+  private def dependencyGraph()
+      : (Graph[Resource, DiEdge], Map[String, OWLAxiom]) = {
     val unsafe = this.unsafeRoles
     var nodemap = Map.empty[String, OWLAxiom]
 
@@ -278,7 +277,7 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
     var rules = datalog._2.flatten
 
     /* Open connection with RDFox */
-    val (server, data) = RDFoxUtil.openConnection("RSACheck")
+    val (server, data) = RDFoxUtil.openConnection("rsa_dependency_graph")
 
     /* Add additional built-in rules */
     val varX = Variable.create("X")
@@ -289,14 +288,13 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
       RSA.U(varX),
       RSA.U(varY)
     ) :: rules
-
     /* Load facts and rules from ontology */
     RDFoxUtil.addFacts(data, facts)
     RDFoxUtil.addRules(data, rules)
     /* Load data files */
     RDFoxUtil.addData(data, datafiles: _*)
 
-    /* Build graph */
+    /* Build the graph */
     val query = "SELECT ?X ?Y WHERE { ?X rsa:E ?Y }"
     val answers = RDFoxUtil.submitQuery(data, query, RSA.Prefixes).get
     var edges: Seq[DiEdge[Resource]] =
@@ -306,10 +304,7 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
     /* Close connection to RDFox */
     RDFoxUtil.closeConnection(server, data)
 
-    /* Approximate the ontology to RSA */
-    approximate(graph, nodemap)
-
-    graph
+    (graph, nodemap)
   }
 
   /** Approximate a Horn-ALCHOIQ ontology to RSA
@@ -321,10 +316,10 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
    * @param graph the graph used to compute the axioms to remove.
    * @param nodemap map from graph nodes to ontology axioms.
    */
-  private def approximate(
-      graph: Graph[Resource, DiEdge],
-      nodemap: Map[String, OWLAxiom]
-  ): Unit = {
+  def toRSA(): RSAOntology = {
+
+    /* Compute the dependency graph for the ontology */
+    val (graph, nodemap) = this.dependencyGraph()
 
     /* Define node colors for the graph visit */
     sealed trait NodeColor
@@ -364,7 +359,13 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
     val toDelete = color.iterator.collect { case (resource: IRI, ToDelete) =>
       nodemap(resource.getIRI)
     }.toSeq
+
+    /* Remove axioms from approximated ontology */
     ontology.removeAxioms(toDelete: _*)
+    this.removed = toDelete
+
+    /* Return RSA ontology */
+    RSAOntology(ontology, datafiles: _*)
   }
   // val edges1 = Seq('A ~> 'B, 'B ~> 'C, 'C ~> 'D, 'D ~> 'H, 'H ~>
   //   'G, 'G ~> 'F, 'E ~> 'A, 'E ~> 'F, 'B ~> 'E, 'F ~> 'G, 'B ~> 'F,
@@ -373,16 +374,6 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
   // val edges3 = Seq('P ~> 'O)
   // val graph = Graph.from(edges = edges1 ++ edges2 ++ edges3)
 
-  /** RSA check
-   *
-   * Acyclicity check over *undirected* dependency graph.
-   * NOTE: at the moment we are using the direct version of the graph.
-   *
-   * @deprecated
-   */
-  lazy val isRSA: Boolean =
-    Logger.timed(dependencyGraph.isAcyclic, "RSA check", Logger.DEBUG)
-
   /** Top axiomatization rules
    *
    * For each concept/role *in the ontology file* introduce a rule to
@@ -687,4 +678,29 @@ class RSAOntology(_ontology: File, val datafiles: File*) {
   def unfold(axiom: OWLSubClassOfAxiom): Set[Term] =
     this.self(axiom) | this.cycle(axiom)
 
-} // implicit class RSAOntology
+  /** Log normalization/approximation statistics */
+  def statistics(level: Logger.Level = Logger.DEBUG): Unit = {
+    Logger.print(
+      s"Logical axioms in original input ontology: ${original.getLogicalAxiomCount(true)}",
+      level
+    )
+    Logger.print(
+      s"Logical axioms discarded in Horn-ALCHOIQ approximation: ${normalizer.discarded}",
+      level
+    )
+    Logger.print(
+      s"Logical axioms shifted in Horn-ALCHOIQ approximation: ${normalizer.shifted}",
+      level
+    )
+    Logger.print(
+      s"Logical axioms in Horn-ALCHOIQ ontology: ${ontology
+        .getLogicalAxiomCount(true)} (${tbox.length}/${rbox.length}/${abox.length})",
+      level
+    )
+    Logger.print(
+      s"Logical axioms discarded in RSA approximation: ${removed.length}",
+      level
+    )
+  }
+
+} // class RSAOntology
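
Note: construction of RSAOntology now goes through the companion object, which either loads an ontology file with the shared OWLAPI manager or wraps an OWLOntology that is already in memory; toRSA() then computes the dependency graph, removes the offending axioms and re-wraps the pruned ontology as a new RSAOntology. A hedged usage sketch of the two overloads (file names are placeholders; OWLManager is the standard OWLAPI entry point):

    import java.io.File
    import org.semanticweb.owlapi.apibinding.OWLManager
    import uk.ac.ox.cs.rsacomb.RSAOntology

    // First overload: load the ontology file through the companion's OWLAPI manager
    val fromFile = RSAOntology(new File("ontology.owl"), new File("data.ttl"))

    // Second overload: wrap an OWLOntology that was loaded elsewhere
    val owl = OWLManager
      .createOWLOntologyManager()
      .loadOntologyFromOntologyDocument(new File("ontology.owl"))
    val fromOwl = RSAOntology(owl, new File("data.ttl"))

    // Approximate to RSA; statistics() reports axioms dropped by the normalizer
    // and by the RSA approximation itself
    val rsa = fromFile.toRSA()
    fromFile.statistics()
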
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala
index fe81312..205c369 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala
@@ -32,8 +32,8 @@ class Normalizer() {
   }
 
   /** Statistics */
-  private var discarted = 0
-  private var shifted = 0
+  var discarded = 0
+  var shifted = 0
 
   /** Normalizes a
     * [[org.semanticweb.owlapi.model.OWLLogicalAxiom OWLLogicalAxiom]]
@@ -576,7 +576,7 @@ class Normalizer() {
       axiom: OWLLogicalAxiom
   ): Seq[OWLLogicalAxiom] = {
     /* Update statistics */
-    discarted += 1
+    discarded += 1
     Logger print s"'$axiom' has been ignored because it is not in Horn-ALCHOIQ"
     Seq()
   }
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/JavaCollections.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/JavaCollections.scala
index 8c513fd..ad7b07a 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/JavaCollections.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/JavaCollections.scala
@@ -1,5 +1,6 @@
 package uk.ac.ox.cs.rsacomb.implicits
 
+import java.util.stream.{Collectors, Stream}
 import scala.collection.JavaConverters._
 
 object JavaCollections {
@@ -13,6 +14,11 @@ object JavaCollections {
   implicit def javaSetToScalaList[A](set: java.util.Set[A]): List[A] =
     set.asScala.toList
 
+  implicit def javaStreamToScalaSeq[A](
+      stream: java.util.stream.Stream[A]
+  ): Seq[A] =
+    stream.collect(Collectors.toList()).asScala.toSeq
+
   implicit def javaCollectionToScalaList[A](
       set: java.util.Collection[A]
   ): List[A] =
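
Note: the new javaStreamToScalaSeq implicit is what lets RSAOntology.unsafeRoles shorten "role1 +: reasoner.superObjectProperties(role1).collect(Collectors.toList()).asScala" to "role1 +: reasoner.superObjectProperties(role1)" in the hunks above. A small self-contained sketch of the conversion (the stream of strings below stands in for the java.util.stream.Stream values returned by OWLAPI):

    import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._

    val stream: java.util.stream.Stream[String] =
      java.util.stream.Stream.of("a", "b", "c")
    // The implicit converts the Java stream into a Scala Seq, so ordinary Scala
    // collection operations (here, prepending with +:) apply directly.
    val seq: Seq[String] = stream
    assert(("x" +: seq) == Seq("x", "a", "b", "c"))
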