| author | Federico Igne <git@federicoigne.com> | 2021-07-27 10:34:57 +0100 |
|---|---|---|
| committer | Federico Igne <git@federicoigne.com> | 2021-07-27 10:34:57 +0100 |
| commit | d017662e2d65ec72e7decde3b76591c198da9819 (patch) | |
| tree | 57193f145cb39223db0b0da6055556aca7d04622 | |
| parent | c597b5efbe9e351a4313ef8fc1215f9e188b1ffd (diff) | |
| parent | 7d619706551117a485d93d0d6847a25afa6a359d (diff) | |
Merge branch 'approximation' (tag: v0.2.0)
9 files changed, 886 insertions, 415 deletions
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala
index b949ccb..ca54054 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala
| @@ -92,8 +92,8 @@ class CanonicalModel(val ontology: RSAOntology) { | |||
| 92 | val (facts, rules): (List[TupleTableAtom], List[Rule]) = { | 92 | val (facts, rules): (List[TupleTableAtom], List[Rule]) = { |
| 93 | // Compute rules from ontology axioms | 93 | // Compute rules from ontology axioms |
| 94 | val (facts, rules) = { | 94 | val (facts, rules) = { |
| 95 | val term = RSAOntology.genFreshVariable() | 95 | val term = RSAUtil.genFreshVariable() |
| 96 | val unsafe = ontology.unsafeRoles | 96 | val unsafe = ontology.unsafe |
| 97 | ontology.axioms | 97 | ontology.axioms |
| 98 | .map(a => | 98 | .map(a => |
| 99 | CanonicalModelConverter.convert(a, term, unsafe, Constant(a), Empty) | 99 | CanonicalModelConverter.convert(a, term, unsafe, Constant(a), Empty) |
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
index 231d2b8..258c226 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala
| @@ -26,6 +26,10 @@ import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery | |||
| 26 | import util.{Logger, RDFoxUtil, RSA} | 26 | import util.{Logger, RDFoxUtil, RSA} |
| 27 | import sparql.ConjunctiveQuery | 27 | import sparql.ConjunctiveQuery |
| 28 | 28 | ||
| 29 | import uk.ac.ox.cs.rsacomb.ontology.Ontology | ||
| 30 | import uk.ac.ox.cs.rsacomb.converter.Normalizer | ||
| 31 | import uk.ac.ox.cs.rsacomb.approximation.LowerBound | ||
| 32 | |||
| 29 | case class RSAOption[+T](opt: T) { | 33 | case class RSAOption[+T](opt: T) { |
| 30 | def get[T]: T = opt.asInstanceOf[T] | 34 | def get[T]: T = opt.asInstanceOf[T] |
| 31 | } | 35 | } |
| @@ -122,12 +126,15 @@ object RSAComb extends App { | |||
| 122 | /* Command-line options */ | 126 | /* Command-line options */ |
| 123 | val config = RSAConfig.parse(args.toList) | 127 | val config = RSAConfig.parse(args.toList) |
| 124 | 128 | ||
| 125 | val ontology = RSAOntology( | 129 | /* Load original ontology and normalize it */ |
| 130 | val ontology = Ontology( | ||
| 126 | config('ontology).get[File], | 131 | config('ontology).get[File], |
| 127 | config('data).get[List[File]]: _* | 132 | config('data).get[List[File]] |
| 128 | ) | 133 | ).normalize(new Normalizer) |
| 129 | val rsa = ontology.toRSA() | 134 | |
| 130 | ontology.statistics() | 135 | /* Approximate the ontology to RSA */ |
| 136 | val toRSA = new LowerBound | ||
| 137 | val rsa = ontology approximate toRSA | ||
| 131 | 138 | ||
| 132 | if (config contains 'query) { | 139 | if (config contains 'query) { |
| 133 | val query = | 140 | val query = |
| @@ -140,18 +147,18 @@ object RSAComb extends App { | |||
| 140 | Logger.print(s"$answers", Logger.VERBOSE) | 147 | Logger.print(s"$answers", Logger.VERBOSE) |
| 141 | Logger print s"Number of answers: ${answers.length} (${answers.lengthWithMultiplicity})" | 148 | Logger print s"Number of answers: ${answers.length} (${answers.lengthWithMultiplicity})" |
| 142 | // Retrieve unfiltered answers | 149 | // Retrieve unfiltered answers |
| 143 | val unfiltered = rsa.queryDataStore( | 150 | // val unfiltered = rsa.queryDataStore( |
| 144 | """ | 151 | // """ |
| 145 | SELECT (count(?K) as ?COUNT) | 152 | // SELECT (count(?K) as ?COUNT) |
| 146 | WHERE { | 153 | // WHERE { |
| 147 | ?K a rsa:QM . | 154 | // ?K a rsa:QM . |
| 148 | } | 155 | // } |
| 149 | """, | 156 | // """, |
| 150 | RSA.Prefixes | 157 | // RSA.Prefixes |
| 151 | ) | 158 | // ) |
| 152 | unfiltered.foreach((u) => | 159 | // unfiltered.foreach((u) => |
| 153 | Logger print s"Number of unfiltered answers: ${u.head._2}" | 160 | // Logger print s"Number of unfiltered answers: ${u.head._2}" |
| 154 | ) | 161 | // ) |
| 155 | } | 162 | } |
| 156 | case None => | 163 | case None => |
| 157 | throw new RuntimeException("Submitted query is not conjunctive") | 164 | throw new RuntimeException("Submitted query is not conjunctive") |
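For orientation, here is a minimal sketch of the pipeline this hunk introduces in `Main.scala`: load and normalize the ontology, approximate it to RSA with `LowerBound`, then answer a query. The file paths and the `ApproximationPipeline` wrapper are placeholders, and a pre-parsed `ConjunctiveQuery` is assumed; only `Ontology`, `Normalizer`, `LowerBound`, `approximate`, and `ask` come from the diff.

```scala
import java.io.File
import uk.ac.ox.cs.rsacomb.ontology.Ontology
import uk.ac.ox.cs.rsacomb.converter.Normalizer
import uk.ac.ox.cs.rsacomb.approximation.LowerBound
import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery

object ApproximationPipeline {

  /** Run a (pre-parsed) conjunctive query over the RSA lower-bound
    * approximation of an ontology. Paths are hypothetical.
    */
  def run(query: ConjunctiveQuery): Unit = {
    /* Load the original ontology plus data files and normalize it */
    val ontology = Ontology(
      new File("ontology.owl"),    // placeholder ontology file
      List(new File("data.ttl"))   // placeholder data file
    ).normalize(new Normalizer)

    /* Approximate the normalized ontology to RSA (sound for CQ answering) */
    val rsa = ontology approximate (new LowerBound)

    /* Compute the (filtered) certain answers */
    val answers = rsa ask query
    println(s"Number of answers: ${answers.length}")
  }
}
```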
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
index 3e10697..30e1305 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala
| @@ -66,117 +66,159 @@ import scala.collection.JavaConverters._ | |||
| 66 | import scala.collection.mutable.{Set, Map} | 66 | import scala.collection.mutable.{Set, Map} |
| 67 | import scalax.collection.Graph | 67 | import scalax.collection.Graph |
| 68 | import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._ | 68 | import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._ |
| 69 | import scalax.collection.GraphTraversal._ | ||
| 70 | 69 | ||
| 71 | /* Debug only */ | 70 | /* Debug only */ |
| 72 | import org.semanticweb.owlapi.dlsyntax.renderer.DLSyntaxObjectRenderer | 71 | import org.semanticweb.owlapi.dlsyntax.renderer.DLSyntaxObjectRenderer |
| 73 | import tech.oxfordsemantic.jrdfox.logic._ | 72 | import tech.oxfordsemantic.jrdfox.logic._ |
| 74 | import org.semanticweb.owlapi.model.OWLObjectInverseOf | 73 | import org.semanticweb.owlapi.model.OWLObjectInverseOf |
| 75 | 74 | ||
| 75 | import uk.ac.ox.cs.rsacomb.approximation.Approximation | ||
| 76 | import uk.ac.ox.cs.rsacomb.converter._ | 76 | import uk.ac.ox.cs.rsacomb.converter._ |
| 77 | import uk.ac.ox.cs.rsacomb.filtering.{FilteringProgram, FilterType} | 77 | import uk.ac.ox.cs.rsacomb.filtering.{FilteringProgram, FilterType} |
| 78 | import uk.ac.ox.cs.rsacomb.suffix._ | 78 | import uk.ac.ox.cs.rsacomb.suffix._ |
| 79 | import uk.ac.ox.cs.rsacomb.sparql._ | 79 | import uk.ac.ox.cs.rsacomb.sparql._ |
| 80 | import uk.ac.ox.cs.rsacomb.util.{RDFoxUtil, RSA} | 80 | import uk.ac.ox.cs.rsacomb.util.{RDFoxUtil, RSA} |
| 81 | import uk.ac.ox.cs.rsacomb.util.Logger | 81 | import uk.ac.ox.cs.rsacomb.util.Logger |
| 82 | import uk.ac.ox.cs.rsacomb.ontology.Ontology | ||
| 82 | 83 | ||
| 83 | object RSAOntology { | 84 | object RSAUtil { |
| 84 | 85 | ||
| 85 | /** Name of the RDFox data store used for CQ answering */ | 86 | // implicit def axiomsToOntology(axioms: Seq[OWLAxiom]) = { |
| 86 | private val DataStore = "answer_computation" | 87 | // val manager = OWLManager.createOWLOntologyManager() |
| 88 | // manager.createOntology(axioms.asJava) | ||
| 89 | // } | ||
| 87 | 90 | ||
| 88 | /** Simple fresh variable generator */ | 91 | /** Manager instance to interface with OWLAPI */ |
| 92 | val manager = OWLManager.createOWLOntologyManager() | ||
| 93 | val factory = manager.getOWLDataFactory() | ||
| 94 | |||
| 95 | /** Simple fresh variable/class generator */ | ||
| 89 | private var counter = -1; | 96 | private var counter = -1; |
| 90 | def genFreshVariable(): Variable = { | 97 | def genFreshVariable(): Variable = { |
| 91 | counter += 1 | 98 | counter += 1 |
| 92 | Variable.create(f"I$counter%05d") | 99 | Variable.create(f"I$counter%05d") |
| 93 | } | 100 | } |
| 101 | def getFreshOWLClass(): OWLClass = { | ||
| 102 | counter += 1 | ||
| 103 | factory.getOWLClass(s"X$counter") | ||
| 104 | } | ||
| 105 | |||
| 106 | } | ||
| 107 | |||
| 108 | object RSAOntology { | ||
| 109 | |||
| 110 | import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ | ||
| 94 | 111 | ||
| 95 | /** Manager instance to interface with OWLAPI */ | 112 | /** Manager instance to interface with OWLAPI */ |
| 96 | val manager = OWLManager.createOWLOntologyManager() | 113 | val manager = OWLManager.createOWLOntologyManager() |
| 97 | 114 | ||
| 98 | def apply(ontology: File, data: File*): RSAOntology = | 115 | /** Name of the RDFox data store used for CQ answering */ |
| 99 | new RSAOntology( | 116 | private val DataStore = "answer_computation" |
| 100 | manager.loadOntologyFromOntologyDocument(ontology), | 117 | |
| 101 | data: _* | 118 | /** Filtering program for a given query |
| 119 | * | ||
| 120 | * @param query the query to derive the filtering program | ||
| 121 | * @return the filtering program for the given query | ||
| 122 | */ | ||
| 123 | def filteringProgram(query: ConjunctiveQuery): FilteringProgram = | ||
| 124 | Logger.timed( | ||
| 125 | FilteringProgram(FilterType.REVISED)(query), | ||
| 126 | "Generating filtering program", | ||
| 127 | Logger.DEBUG | ||
| 102 | ) | 128 | ) |
| 103 | 129 | ||
| 104 | def apply(ontology: OWLOntology, data: File*): RSAOntology = | 130 | def apply( |
| 105 | new RSAOntology(ontology, data: _*) | 131 | axioms: List[OWLLogicalAxiom], |
| 132 | datafiles: List[File] | ||
| 133 | ): RSAOntology = new RSAOntology(axioms, datafiles) | ||
| 134 | |||
| 135 | // def apply( | ||
| 136 | // ontofile: File, | ||
| 137 | // datafiles: List[File], | ||
| 138 | // approx: Option[Approximation] | ||
| 139 | // ): RSAOntology = { | ||
| 140 | // val ontology = manager.loadOntologyFromOntologyDocument(ontofile) | ||
| 141 | // RSAOntology(ontology, datafiles, approx) | ||
| 142 | // } | ||
| 143 | |||
| 144 | // def apply( | ||
| 145 | // ontology: OWLOntology, | ||
| 146 | // datafiles: List[File], | ||
| 147 | // approx: Option[Approximation] | ||
| 148 | // ): RSAOntology = { | ||
| 149 | // val normalizer = new Normalizer() | ||
| 150 | |||
| 151 | // /** TBox axioms */ | ||
| 152 | // var tbox: List[OWLLogicalAxiom] = | ||
| 153 | // ontology | ||
| 154 | // .tboxAxioms(Imports.INCLUDED) | ||
| 155 | // .collect(Collectors.toList()) | ||
| 156 | // .collect { case a: OWLLogicalAxiom => a } | ||
| 157 | // .flatMap(normalizer.normalize) | ||
| 158 | |||
| 159 | // /** RBox axioms */ | ||
| 160 | // var rbox: List[OWLLogicalAxiom] = | ||
| 161 | // ontology | ||
| 162 | // .rboxAxioms(Imports.INCLUDED) | ||
| 163 | // .collect(Collectors.toList()) | ||
| 164 | // .collect { case a: OWLLogicalAxiom => a } | ||
| 165 | // .flatMap(normalizer.normalize) | ||
| 166 | |||
| 167 | // /** ABox axioms | ||
| 168 | // * | ||
| 169 | // * @note this represents only the set of assertions contained in the | ||
| 170 | // * ontology file. Data files specified in `datafiles` are directly | ||
| 171 | // * imported in RDFox due to performance issues when trying to import | ||
| 172 | // * large data files via OWLAPI. | ||
| 173 | // */ | ||
| 174 | // var abox: List[OWLLogicalAxiom] = | ||
| 175 | // ontology | ||
| 176 | // .aboxAxioms(Imports.INCLUDED) | ||
| 177 | // .collect(Collectors.toList()) | ||
| 178 | // .collect { case a: OWLLogicalAxiom => a } | ||
| 179 | // .flatMap(normalizer.normalize) | ||
| 180 | |||
| 181 | // /** Collection of logical axioms in the input ontology */ | ||
| 182 | // var axioms: List[OWLLogicalAxiom] = abox ::: tbox ::: rbox | ||
| 183 | |||
| 184 | // new RSAOntology( | ||
| 185 | // approx match { | ||
| 186 | // case Some(a) => a.approximate(axioms, datafiles) | ||
| 187 | // case None => axioms | ||
| 188 | // }, | ||
| 189 | // datafiles: _* | ||
| 190 | // ) | ||
| 191 | // } | ||
| 192 | |||
| 106 | } | 193 | } |
| 107 | 194 | ||
| 108 | /** Wrapper class for an ontology in RSA | 195 | /** A wrapper for an RSA ontology |
| 109 | * | 196 | * |
| 110 | * @param ontology the input OWL2 ontology. | 197 | * @param ontology the input OWL2 ontology. |
| 111 | * @param datafiles additional data (treated as part of the ABox) | 198 | * @param datafiles additional data (treated as part of the ABox) |
| 112 | */ | 199 | */ |
| 113 | class RSAOntology(val original: OWLOntology, val datafiles: File*) { | 200 | class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) |
| 201 | extends Ontology(axioms, datafiles) { | ||
| 114 | 202 | ||
| 115 | /** Simplify conversion between OWLAPI and RDFox concepts */ | 203 | /** Simplify conversion between OWLAPI and RDFox concepts */ |
| 116 | import implicits.RDFox._ | 204 | import implicits.RDFox._ |
| 117 | import uk.ac.ox.cs.rsacomb.implicits.RSAAxiom._ | 205 | import uk.ac.ox.cs.rsacomb.implicits.RSAAxiom._ |
| 206 | |||
| 207 | /** Simplify conversion between Java and Scala collections */ | ||
| 118 | import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ | 208 | import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ |
| 119 | 209 | ||
| 120 | /** Set of axioms removed during the approximation to RSA */ | 210 | /** Set of axioms removed during the approximation to RSA */ |
| 121 | private var removed: Seq[OWLAxiom] = Seq.empty | 211 | //private var removed: Seq[OWLAxiom] = Seq.empty |
| 122 | |||
| 123 | /** The normalizer normalizes the ontology and approximates it to | ||
| 124 | * Horn-ALCHOIQ. A further step is needed to obtain an RSA | ||
| 125 | * approximation of the input ontology `original`. | ||
| 126 | */ | ||
| 127 | private val normalizer = new Normalizer() | ||
| 128 | |||
| 129 | /** TBox axioms */ | ||
| 130 | var tbox: List[OWLLogicalAxiom] = | ||
| 131 | original | ||
| 132 | .tboxAxioms(Imports.INCLUDED) | ||
| 133 | .collect(Collectors.toList()) | ||
| 134 | .collect { case a: OWLLogicalAxiom => a } | ||
| 135 | .flatMap(normalizer.normalize) | ||
| 136 | |||
| 137 | /** RBox axioms */ | ||
| 138 | var rbox: List[OWLLogicalAxiom] = | ||
| 139 | original | ||
| 140 | .rboxAxioms(Imports.INCLUDED) | ||
| 141 | .collect(Collectors.toList()) | ||
| 142 | .collect { case a: OWLLogicalAxiom => a } | ||
| 143 | .flatMap(normalizer.normalize) | ||
| 144 | |||
| 145 | /** ABox axioms | ||
| 146 | * | ||
| 147 | * @note this represents only the set of assertions contained in the | ||
| 148 | * ontology file. Data files specified in `datafiles` are directly | ||
| 149 | * imported in RDFox due to performance issues when trying to import | ||
| 150 | * large data files via OWLAPI. | ||
| 151 | */ | ||
| 152 | var abox: List[OWLLogicalAxiom] = | ||
| 153 | original | ||
| 154 | .aboxAxioms(Imports.INCLUDED) | ||
| 155 | .collect(Collectors.toList()) | ||
| 156 | .collect { case a: OWLLogicalAxiom => a } | ||
| 157 | .flatMap(normalizer.normalize) | ||
| 158 | |||
| 159 | /** Collection of logical axioms in the input ontology */ | ||
| 160 | var axioms: List[OWLLogicalAxiom] = abox ::: tbox ::: rbox | ||
| 161 | |||
| 162 | /** Normalized Horn-ALCHOIQ ontology */ | ||
| 163 | val ontology = RSAOntology.manager.createOntology( | ||
| 164 | axioms.asInstanceOf[List[OWLAxiom]].asJava | ||
| 165 | ) | ||
| 166 | |||
| 167 | /** OWLAPI internal reasoner instantiated over the approximated ontology */ | ||
| 168 | private val reasoner = | ||
| 169 | (new StructuralReasonerFactory()).createReasoner(ontology) | ||
| 170 | 212 | ||
| 171 | /** Retrieve individuals/literals in the ontology */ | 213 | /** Retrieve individuals/literals in the ontology */ |
| 172 | val individuals: List[IRI] = | 214 | private val individuals: List[IRI] = |
| 173 | ontology | 215 | ontology |
| 174 | .getIndividualsInSignature() | 216 | .getIndividualsInSignature() |
| 175 | .asScala | 217 | .asScala |
| 176 | .map(_.getIRI) | 218 | .map(_.getIRI) |
| 177 | .map(implicits.RDFox.owlapiToRdfoxIri) | 219 | .map(implicits.RDFox.owlapiToRdfoxIri) |
| 178 | .toList | 220 | .toList |
| 179 | val literals: List[Literal] = | 221 | private val literals: List[Literal] = |
| 180 | axioms | 222 | axioms |
| 181 | .collect { case a: OWLDataPropertyAssertionAxiom => a } | 223 | .collect { case a: OWLDataPropertyAssertionAxiom => a } |
| 182 | .map(_.getObject) | 224 | .map(_.getObject) |
| @@ -186,7 +228,7 @@ class RSAOntology(val original: OWLOntology, val datafiles: File*) { | |||
| 186 | val concepts: List[OWLClass] = | 228 | val concepts: List[OWLClass] = |
| 187 | ontology.getClassesInSignature().asScala.toList | 229 | ontology.getClassesInSignature().asScala.toList |
| 188 | val roles: List[OWLObjectPropertyExpression] = | 230 | val roles: List[OWLObjectPropertyExpression] = |
| 189 | (tbox ++ rbox) | 231 | axioms |
| 190 | .flatMap(_.objectPropertyExpressionsInSignature) | 232 | .flatMap(_.objectPropertyExpressionsInSignature) |
| 191 | .distinct | 233 | .distinct |
| 192 | 234 | ||
| @@ -202,123 +244,36 @@ class RSAOntology(val original: OWLOntology, val datafiles: File*) { | |||
| 202 | * if there exists a role p2 appearing in an axiom of type T4 and | 244 | * if there exists a role p2 appearing in an axiom of type T4 and |
| 203 | * p1 is a subproperty of either p2 or the inverse of p2. | 245 | * p1 is a subproperty of either p2 or the inverse of p2. |
| 204 | */ | 246 | */ |
| 205 | val unsafeRoles: List[OWLObjectPropertyExpression] = { | 247 | // val unsafeRoles: List[OWLObjectPropertyExpression] = { |
| 206 | 248 | ||
| 207 | /* Checking for unsafety condition (1) */ | 249 | // /* Checking for unsafety condition (1) */ |
| 208 | val unsafe1 = for { | 250 | // val unsafe1 = for { |
| 209 | axiom <- tbox | 251 | // axiom <- axioms |
| 210 | if axiom.isT5 | 252 | // if axiom.isT5 |
| 211 | role1 <- axiom.objectPropertyExpressionsInSignature | 253 | // role1 <- axiom.objectPropertyExpressionsInSignature |
| 212 | roleSuper = role1 +: reasoner.superObjectProperties(role1) | 254 | // roleSuper = role1 +: reasoner.superObjectProperties(role1) |
| 213 | roleSuperInv = roleSuper.map(_.getInverseProperty) | 255 | // roleSuperInv = roleSuper.map(_.getInverseProperty) |
| 214 | axiom <- tbox | 256 | // axiom <- axioms |
| 215 | if axiom.isT3 && !axiom.isT3top | 257 | // if axiom.isT3 && !axiom.isT3top |
| 216 | role2 <- axiom.objectPropertyExpressionsInSignature | 258 | // role2 <- axiom.objectPropertyExpressionsInSignature |
| 217 | if roleSuperInv contains role2 | 259 | // if roleSuperInv contains role2 |
| 218 | } yield role1 | 260 | // } yield role1 |
| 219 | 261 | ||
| 220 | /* Checking for unsafety condition (2) */ | 262 | // /* Checking for unsafety condition (2) */ |
| 221 | val unsafe2 = for { | 263 | // val unsafe2 = for { |
| 222 | axiom <- tbox | 264 | // axiom <- axioms |
| 223 | if axiom.isT5 | 265 | // if axiom.isT5 |
| 224 | role1 <- axiom.objectPropertyExpressionsInSignature | 266 | // role1 <- axiom.objectPropertyExpressionsInSignature |
| 225 | roleSuper = role1 +: reasoner.superObjectProperties(role1) | 267 | // roleSuper = role1 +: reasoner.superObjectProperties(role1) |
| 226 | roleSuperInv = roleSuper.map(_.getInverseProperty) | 268 | // roleSuperInv = roleSuper.map(_.getInverseProperty) |
| 227 | axiom <- tbox | 269 | // axiom <- axioms |
| 228 | if axiom.isT4 | 270 | // if axiom.isT4 |
| 229 | role2 <- axiom.objectPropertyExpressionsInSignature | 271 | // role2 <- axiom.objectPropertyExpressionsInSignature |
| 230 | if roleSuper.contains(role2) || roleSuperInv.contains(role2) | 272 | // if roleSuper.contains(role2) || roleSuperInv.contains(role2) |
| 231 | } yield role1 | 273 | // } yield role1 |
| 232 | 274 | ||
| 233 | unsafe1 ++ unsafe2 | 275 | // unsafe1 ++ unsafe2 |
| 234 | } | 276 | // } |
| 235 | |||
| 236 | /** Compute the RSA dependency graph | ||
| 237 | * | ||
| 238 | * This is used to approximate the input ontology to RSA. | ||
| 239 | * | ||
| 240 | * @return a tuple containing the dependency graph and a map between | ||
| 241 | * the constants newly introduced and the corresponding axioms in the | ||
| 242 | * ontology. | ||
| 243 | */ | ||
| 244 | private def dependencyGraph() | ||
| 245 | : (Graph[Resource, DiEdge], Map[String, OWLAxiom]) = { | ||
| 246 | val unsafe = this.unsafeRoles | ||
| 247 | var nodemap = Map.empty[String, OWLAxiom] | ||
| 248 | |||
| 249 | object RSAConverter extends RDFoxConverter { | ||
| 250 | |||
| 251 | override def convert( | ||
| 252 | expr: OWLClassExpression, | ||
| 253 | term: Term, | ||
| 254 | unsafe: List[OWLObjectPropertyExpression], | ||
| 255 | skolem: SkolemStrategy, | ||
| 256 | suffix: RSASuffix | ||
| 257 | ): Shards = | ||
| 258 | (expr, skolem) match { | ||
| 259 | |||
| 260 | case (e: OWLObjectSomeValuesFrom, c: Constant) => { | ||
| 261 | nodemap.update(c.iri.getIRI, c.axiom) | ||
| 262 | val (res, ext) = super.convert(e, term, unsafe, skolem, suffix) | ||
| 263 | if (unsafe contains e.getProperty) | ||
| 264 | (RSA.PE(term, c.iri) :: RSA.U(c.iri) :: res, ext) | ||
| 265 | else | ||
| 266 | (RSA.PE(term, c.iri) :: res, ext) | ||
| 267 | } | ||
| 268 | |||
| 269 | case (e: OWLDataSomeValuesFrom, c: Constant) => { | ||
| 270 | nodemap.update(c.iri.getIRI, c.axiom) | ||
| 271 | val (res, ext) = super.convert(e, term, unsafe, skolem, suffix) | ||
| 272 | if (unsafe contains e.getProperty) | ||
| 273 | (RSA.PE(term, c.iri) :: RSA.U(c.iri) :: res, ext) | ||
| 274 | else | ||
| 275 | (RSA.PE(term, c.iri) :: res, ext) | ||
| 276 | } | ||
| 277 | |||
| 278 | case _ => super.convert(expr, term, unsafe, skolem, suffix) | ||
| 279 | } | ||
| 280 | } | ||
| 281 | |||
| 282 | /* Ontology conversion into LP rules */ | ||
| 283 | val term = RSAOntology.genFreshVariable() | ||
| 284 | val result = axioms.map(a => | ||
| 285 | RSAConverter.convert(a, term, unsafe, new Constant(a), Empty) | ||
| 286 | ) | ||
| 287 | |||
| 288 | val datalog = result.unzip | ||
| 289 | val facts = datalog._1.flatten | ||
| 290 | var rules = datalog._2.flatten | ||
| 291 | |||
| 292 | /* Open connection with RDFox */ | ||
| 293 | val (server, data) = RDFoxUtil.openConnection("rsa_dependency_graph") | ||
| 294 | |||
| 295 | /* Add additional built-in rules */ | ||
| 296 | val varX = Variable.create("X") | ||
| 297 | val varY = Variable.create("Y") | ||
| 298 | rules = Rule.create( | ||
| 299 | RSA.E(varX, varY), | ||
| 300 | RSA.PE(varX, varY), | ||
| 301 | RSA.U(varX), | ||
| 302 | RSA.U(varY) | ||
| 303 | ) :: rules | ||
| 304 | /* Load facts and rules from ontology */ | ||
| 305 | RDFoxUtil.addFacts(data, facts) | ||
| 306 | RDFoxUtil.addRules(data, rules) | ||
| 307 | /* Load data files */ | ||
| 308 | RDFoxUtil.addData(data, datafiles: _*) | ||
| 309 | |||
| 310 | /* Build the graph */ | ||
| 311 | val query = "SELECT ?X ?Y WHERE { ?X rsa:E ?Y }" | ||
| 312 | val answers = RDFoxUtil.submitQuery(data, query, RSA.Prefixes).get | ||
| 313 | var edges: Seq[DiEdge[Resource]] = | ||
| 314 | answers.collect { case (_, Seq(n1, n2)) => n1 ~> n2 } | ||
| 315 | val graph = Graph(edges: _*) | ||
| 316 | |||
| 317 | /* Close connection to RDFox */ | ||
| 318 | RDFoxUtil.closeConnection(server, data) | ||
| 319 | |||
| 320 | (graph, nodemap) | ||
| 321 | } | ||
| 322 | 277 | ||
| 323 | /** Approximate a Horn-ALCHOIQ ontology to RSA | 278 | /** Approximate a Horn-ALCHOIQ ontology to RSA |
| 324 | * | 279 | * |
| @@ -329,61 +284,61 @@ class RSAOntology(val original: OWLOntology, val datafiles: File*) { | |||
| 329 | * @param graph the graph used to compute the axioms to remove. | 284 | * @param graph the graph used to compute the axioms to remove. |
| 330 | * @param nodemap map from graph nodes to ontology axioms. | 285 | * @param nodemap map from graph nodes to ontology axioms. |
| 331 | */ | 286 | */ |
| 332 | def toRSA(): RSAOntology = Logger.timed( | 287 | // def toRSA(): RSAOntology = Logger.timed( |
| 333 | { | 288 | // { |
| 334 | 289 | ||
| 335 | /* Compute the dependency graph for the ontology */ | 290 | // /* Compute the dependency graph for the ontology */ |
| 336 | val (graph, nodemap) = this.dependencyGraph() | 291 | // val (graph, nodemap) = this.dependencyGraph() |
| 337 | 292 | ||
| 338 | /* Define node colors for the graph visit */ | 293 | // /* Define node colors for the graph visit */ |
| 339 | sealed trait NodeColor | 294 | // sealed trait NodeColor |
| 340 | case object Unvisited extends NodeColor | 295 | // case object Unvisited extends NodeColor |
| 341 | case object Visited extends NodeColor | 296 | // case object Visited extends NodeColor |
| 342 | case object ToDelete extends NodeColor | 297 | // case object ToDelete extends NodeColor |
| 343 | 298 | ||
| 344 | /* Keep track of node colors during graph visit */ | 299 | // /* Keep track of node colors during graph visit */ |
| 345 | var color = Map.from[Resource, NodeColor]( | 300 | // var color = Map.from[Resource, NodeColor]( |
| 346 | graph.nodes.toOuter.map(k => (k, Unvisited)) | 301 | // graph.nodes.toOuter.map(k => (k, Unvisited)) |
| 347 | ) | 302 | // ) |
| 348 | 303 | ||
| 349 | for { | 304 | // for { |
| 350 | component <- graph.componentTraverser().map(_ to Graph) | 305 | // component <- graph.componentTraverser().map(_ to Graph) |
| 351 | edge <- component | 306 | // edge <- component |
| 352 | .outerEdgeTraverser(component.nodes.head) | 307 | // .outerEdgeTraverser(component.nodes.head) |
| 353 | .withKind(BreadthFirst) | 308 | // .withKind(BreadthFirst) |
| 354 | } yield { | 309 | // } yield { |
| 355 | val source = edge._1 | 310 | // val source = edge._1 |
| 356 | val target = edge._2 | 311 | // val target = edge._2 |
| 357 | color(source) match { | 312 | // color(source) match { |
| 358 | case Unvisited | Visited => { | 313 | // case Unvisited | Visited => { |
| 359 | color(target) match { | 314 | // color(target) match { |
| 360 | case Unvisited => | 315 | // case Unvisited => |
| 361 | color(source) = Visited; | 316 | // color(source) = Visited; |
| 362 | color(target) = Visited | 317 | // color(target) = Visited |
| 363 | case Visited => | 318 | // case Visited => |
| 364 | color(source) = ToDelete | 319 | // color(source) = ToDelete |
| 365 | case ToDelete => | 320 | // case ToDelete => |
| 366 | color(source) = Visited | 321 | // color(source) = Visited |
| 367 | } | 322 | // } |
| 368 | } | 323 | // } |
| 369 | case ToDelete => | 324 | // case ToDelete => |
| 370 | } | 325 | // } |
| 371 | } | 326 | // } |
| 372 | 327 | ||
| 373 | val toDelete = color.iterator.collect { case (resource: IRI, ToDelete) => | 328 | // val toDelete = color.iterator.collect { case (resource: IRI, ToDelete) => |
| 374 | nodemap(resource.getIRI) | 329 | // nodemap(resource.getIRI) |
| 375 | }.toSeq | 330 | // }.toSeq |
| 376 | 331 | ||
| 377 | /* Remove axioms from approximated ontology */ | 332 | // /* Remove axioms from approximated ontology */ |
| 378 | ontology.removeAxioms(toDelete: _*) | 333 | // ontology.removeAxioms(toDelete: _*) |
| 379 | this.removed = toDelete | 334 | // this.removed = toDelete |
| 380 | 335 | ||
| 381 | /* Return RSA ontology */ | 336 | // /* Return RSA ontology */ |
| 382 | RSAOntology(ontology, datafiles: _*) | 337 | // RSAOntology(ontology, datafiles: _*) |
| 383 | }, | 338 | // }, |
| 384 | "Horn-ALCHOIQ to RSA approximation:", | 339 | // "Horn-ALCHOIQ to RSA approximation:", |
| 385 | Logger.DEBUG | 340 | // Logger.DEBUG |
| 386 | ) | 341 | // ) |
| 387 | // val edges1 = Seq('A ~> 'B, 'B ~> 'C, 'C ~> 'D, 'D ~> 'H, 'H ~> | 342 | // val edges1 = Seq('A ~> 'B, 'B ~> 'C, 'C ~> 'D, 'D ~> 'H, 'H ~> |
| 388 | // 'G, 'G ~> 'F, 'E ~> 'A, 'E ~> 'F, 'B ~> 'E, 'F ~> 'G, 'B ~> 'F, | 343 | // 'G, 'G ~> 'F, 'E ~> 'A, 'E ~> 'F, 'B ~> 'E, 'F ~> 'G, 'B ~> 'F, |
| 389 | // 'C ~> 'G, 'D ~> 'C, 'H ~> 'D) | 344 | // 'C ~> 'G, 'D ~> 'C, 'H ~> 'D) |
| @@ -462,31 +417,27 @@ class RSAOntology(val original: OWLOntology, val datafiles: File*) { | |||
| 462 | ) | 417 | ) |
| 463 | } | 418 | } |
| 464 | 419 | ||
| 420 | /** Canonical model of the ontology */ | ||
| 465 | lazy val canonicalModel = Logger.timed( | 421 | lazy val canonicalModel = Logger.timed( |
| 466 | new CanonicalModel(this), | 422 | new CanonicalModel(this), |
| 467 | "Generating canonical model program", | 423 | "Generating canonical model program", |
| 468 | Logger.DEBUG | 424 | Logger.DEBUG |
| 469 | ) | 425 | ) |
| 470 | 426 | ||
| 471 | def filteringProgram(query: ConjunctiveQuery): FilteringProgram = | 427 | /** Computes all roles conflicting with a given role |
| 472 | Logger.timed( | 428 | * |
| 473 | FilteringProgram(FilterType.REVISED)(query), | 429 | * @param role a role (object property expression). |
| 474 | "Generating filtering program", | 430 | * @return a set of roles conflicting with `role`. |
| 475 | Logger.DEBUG | 431 | */ |
| 476 | ) | ||
| 477 | |||
| 478 | def confl( | 432 | def confl( |
| 479 | role: OWLObjectPropertyExpression | 433 | role: OWLObjectPropertyExpression |
| 480 | ): Set[OWLObjectPropertyExpression] = { | 434 | ): Set[OWLObjectPropertyExpression] = { |
| 481 | 435 | reasoner | |
| 482 | val invSuperRoles = reasoner | ||
| 483 | .superObjectProperties(role) | 436 | .superObjectProperties(role) |
| 484 | .collect(Collectors.toSet()) | 437 | .collect(Collectors.toSet()) |
| 485 | .asScala | 438 | .asScala |
| 486 | .addOne(role) | 439 | .addOne(role) |
| 487 | .map(_.getInverseProperty) | 440 | .map(_.getInverseProperty) |
| 488 | |||
| 489 | invSuperRoles | ||
| 490 | .flatMap(x => | 441 | .flatMap(x => |
| 491 | reasoner | 442 | reasoner |
| 492 | .subObjectProperties(x) | 443 | .subObjectProperties(x) |
| @@ -498,6 +449,77 @@ class RSAOntology(val original: OWLOntology, val datafiles: File*) { | |||
| 498 | .filterNot(_.getInverseProperty.isOWLTopObjectProperty()) | 449 | .filterNot(_.getInverseProperty.isOWLTopObjectProperty()) |
| 499 | } | 450 | } |
| 500 | 451 | ||
| 452 | /** Self-loop detection for a given axiom | ||
| 453 | * | ||
| 454 | * @param axiom an axiom of type [[OWLSubClassOfAxiom]] | ||
| 455 | * @return unfold set for the axiom | ||
| 456 | */ | ||
| 457 | def self(axiom: OWLSubClassOfAxiom): Set[Term] = { | ||
| 458 | val role = axiom.objectPropertyExpressionsInSignature(0) | ||
| 459 | if (this.confl(role).contains(role)) { | ||
| 460 | Set(RSA("v0_" ++ axiom.hashed), RSA("v1_" ++ axiom.hashed)) | ||
| 461 | } else { | ||
| 462 | Set() | ||
| 463 | } | ||
| 464 | } | ||
| 465 | |||
| 466 | /** Cycle detection for a given axiom | ||
| 467 | * | ||
| 468 | * @param axiom an axiom of type [[OWLSubClassOfAxiom]] | ||
| 469 | * @return unfold set for the axiom | ||
| 470 | * | ||
| 471 | * @todo we can actually use `toTriple` from `RSAAxiom` to get the | ||
| 472 | * classes and the role for a given axiom | ||
| 473 | */ | ||
| 474 | def cycle(axiom: OWLSubClassOfAxiom): Set[Term] = { | ||
| 475 | val classes = | ||
| 476 | axiom.classesInSignature.collect(Collectors.toList()).asScala | ||
| 477 | val classA = classes(0) | ||
| 478 | val roleR = axiom | ||
| 479 | .objectPropertyExpressionsInSignature(0) | ||
| 480 | .asInstanceOf[OWLObjectProperty] | ||
| 481 | val classB = classes(1) | ||
| 482 | cycle_aux(classA, roleR, classB) | ||
| 483 | } | ||
| 484 | |||
| 485 | /** Auxiliary function for [[RSAOntology.cycle]] */ | ||
| 486 | private def cycle_aux( | ||
| 487 | classA: OWLClass, | ||
| 488 | roleR: OWLObjectProperty, | ||
| 489 | classB: OWLClass | ||
| 490 | ): Set[Term] = { | ||
| 491 | val conflR = this.confl(roleR) | ||
| 492 | // TODO: technically we just need the TBox here | ||
| 493 | val terms = for { | ||
| 494 | axiom1 <- axioms | ||
| 495 | if axiom1.isT5 | ||
| 496 | // We expect only one role coming out of a T5 axiom | ||
| 497 | roleS <- axiom1.objectPropertyExpressionsInSignature | ||
| 498 | // Triples ordering is among triples involving safe roles. | ||
| 499 | if !unsafe.contains(roleS) | ||
| 500 | if conflR.contains(roleS) | ||
| 501 | tripleARB = RSAAxiom.hashed(classA, roleR, classB) | ||
| 502 | tripleDSC = axiom1.hashed | ||
| 503 | individual = | ||
| 504 | if (tripleARB > tripleDSC) { | ||
| 505 | RSA("v1_" ++ tripleDSC) | ||
| 506 | } else { | ||
| 507 | // Note that this is also the case for | ||
| 508 | // `tripleARB == tripleDSC` | ||
| 509 | RSA("v0_" ++ tripleDSC) | ||
| 510 | } | ||
| 511 | } yield individual | ||
| 512 | terms to Set | ||
| 513 | } | ||
| 514 | |||
| 515 | /** Returns unfold set for self-loop and cycle for the input axiom | ||
| 516 | * | ||
| 517 | * @param axiom an axiom of type [[OWLSubClassOfAxiom]] | ||
| 518 | * @return unfold set for the axiom | ||
| 519 | */ | ||
| 520 | def unfold(axiom: OWLSubClassOfAxiom): Set[Term] = | ||
| 521 | this.self(axiom) | this.cycle(axiom) | ||
| 522 | |||
| 501 | /** Returns the answers to a query | 523 | /** Returns the answers to a query |
| 502 | * | 524 | * |
| 503 | * @param query query to execute | 525 | * @param query query to execute |
| @@ -505,10 +527,9 @@ class RSAOntology(val original: OWLOntology, val datafiles: File*) { | |||
| 505 | */ | 527 | */ |
| 506 | def ask(query: ConjunctiveQuery): ConjunctiveQueryAnswers = Logger.timed( | 528 | def ask(query: ConjunctiveQuery): ConjunctiveQueryAnswers = Logger.timed( |
| 507 | { | 529 | { |
| 508 | import implicits.JavaCollections._ | ||
| 509 | val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore) | 530 | val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore) |
| 510 | val canon = this.canonicalModel | 531 | val canon = this.canonicalModel |
| 511 | val filter = this.filteringProgram(query) | 532 | val filter = RSAOntology.filteringProgram(query) |
| 512 | 533 | ||
| 513 | /* Upload data from data file */ | 534 | /* Upload data from data file */ |
| 514 | RDFoxUtil.addData(data, datafiles: _*) | 535 | RDFoxUtil.addData(data, datafiles: _*) |
| @@ -526,12 +547,15 @@ class RSAOntology(val original: OWLOntology, val datafiles: File*) { | |||
| 526 | new java.util.HashMap[String, String] | 547 | new java.util.HashMap[String, String] |
| 527 | ) | 548 | ) |
| 528 | 549 | ||
| 550 | /* Add canonical model */ | ||
| 529 | Logger print s"Canonical model rules: ${canon.rules.length}" | 551 | Logger print s"Canonical model rules: ${canon.rules.length}" |
| 530 | RDFoxUtil.addRules(data, canon.rules) | 552 | RDFoxUtil.addRules(data, canon.rules) |
| 531 | 553 | ||
| 532 | Logger print s"Canonical model facts: ${canon.facts.length}" | 554 | Logger print s"Canonical model facts: ${canon.facts.length}" |
| 533 | RDFoxUtil.addFacts(data, canon.facts) | 555 | RDFoxUtil.addFacts(data, canon.facts) |
| 534 | 556 | ||
| 557 | RDFoxUtil printStatisticsFor data | ||
| 558 | |||
| 535 | //{ | 559 | //{ |
| 536 | // import java.io.{PrintStream, FileOutputStream, File} | 560 | // import java.io.{PrintStream, FileOutputStream, File} |
| 537 | // val rules1 = new FileOutputStream(new File("rules1-lubm200.dlog")) | 561 | // val rules1 = new FileOutputStream(new File("rules1-lubm200.dlog")) |
| @@ -541,16 +565,13 @@ class RSAOntology(val original: OWLOntology, val datafiles: File*) { | |||
| 541 | // rules2.print(filter.rules.mkString("\n")) | 565 | // rules2.print(filter.rules.mkString("\n")) |
| 542 | //} | 566 | //} |
| 543 | 567 | ||
| 544 | //canon.facts.foreach(println) | 568 | /* Add filtering program */ |
| 545 | //filter.rules.foreach(println) | ||
| 546 | |||
| 547 | RDFoxUtil printStatisticsFor data | ||
| 548 | |||
| 549 | Logger print s"Filtering program rules: ${filter.rules.length}" | 569 | Logger print s"Filtering program rules: ${filter.rules.length}" |
| 550 | RDFoxUtil.addRules(data, filter.rules) | 570 | RDFoxUtil.addRules(data, filter.rules) |
| 551 | 571 | ||
| 552 | RDFoxUtil printStatisticsFor data | 572 | RDFoxUtil printStatisticsFor data |
| 553 | 573 | ||
| 574 | /* Gather answers to the query */ | ||
| 554 | val answers = { | 575 | val answers = { |
| 555 | val ans = filter.answerQuery | 576 | val ans = filter.answerQuery |
| 556 | RDFoxUtil | 577 | RDFoxUtil |
| @@ -558,7 +579,9 @@ class RSAOntology(val original: OWLOntology, val datafiles: File*) { | |||
| 558 | .map(new ConjunctiveQueryAnswers(query.bcq, query.variables, _)) | 579 | .map(new ConjunctiveQueryAnswers(query.bcq, query.variables, _)) |
| 559 | .get | 580 | .get |
| 560 | } | 581 | } |
| 582 | |||
| 561 | RDFoxUtil.closeConnection(server, data) | 583 | RDFoxUtil.closeConnection(server, data) |
| 584 | |||
| 562 | answers | 585 | answers |
| 563 | }, | 586 | }, |
| 564 | "Answers computation", | 587 | "Answers computation", |
| @@ -569,14 +592,15 @@ class RSAOntology(val original: OWLOntology, val datafiles: File*) { | |||
| 569 | * | 592 | * |
| 570 | * @note This method does not add any facts or rules to the data | 593 | * @note This method does not add any facts or rules to the data |
| 571 | * store. It is most useful after the execution of a query using | 594 | * store. It is most useful after the execution of a query using |
| 572 | * [[uk.ac.ox.cs.rsacomb.RSAOntology.ask RSAOntology.ask]]. | 595 | * [[RSAOntology.ask]]. |
| 573 | * @note This method has been introduced mostly for debugging purposes. | ||
| 574 | * | 596 | * |
| 575 | * @param query query to be executed against the environment | 597 | * @param query query to be executed against the environment |
| 576 | * @param prefixes additional prefixes for the query. It defaults to | 598 | * @param prefixes additional prefixes for the query. It defaults to |
| 577 | * an empty set. | 599 | * an empty set. |
| 578 | * @param opts additional options to RDFox. | 600 | * @param opts additional options to RDFox. |
| 579 | * @return a collection of answers to the input query. | 601 | * @return a collection of answers to the input query. |
| 602 | * | ||
| 603 | * @note This method has been introduced mostly for debugging purposes. | ||
| 580 | */ | 604 | */ |
| 581 | def queryDataStore( | 605 | def queryDataStore( |
| 582 | query: String, | 606 | query: String, |
| @@ -598,122 +622,11 @@ class RSAOntology(val original: OWLOntology, val datafiles: File*) { | |||
| 598 | * [[uk.ac.ox.cs.rsacomb.RSAOntology.ask RSAOntology.ask]] | 622 | * [[uk.ac.ox.cs.rsacomb.RSAOntology.ask RSAOntology.ask]] |
| 599 | * for the corresponding query has been called. | 623 | * for the corresponding query has been called. |
| 600 | */ | 624 | */ |
| 601 | def askUnfiltered( | 625 | // def askUnfiltered( |
| 602 | cq: ConjunctiveQuery | 626 | // cq: ConjunctiveQuery |
| 603 | ): Option[Seq[(Long, Seq[Resource])]] = { | 627 | // ): Option[Seq[(Long, Seq[Resource])]] = { |
| 604 | val query = RDFoxUtil.buildDescriptionQuery("QM", cq.variables.length) | 628 | // val query = RDFoxUtil.buildDescriptionQuery("QM", cq.variables.length) |
| 605 | queryDataStore(query, RSA.Prefixes) | 629 | // queryDataStore(query, RSA.Prefixes) |
| 606 | } | 630 | // } |
| 607 | 631 | ||
| 608 | def self(axiom: OWLSubClassOfAxiom): Set[Term] = { | 632 | } |
| 609 | // Assuming just one role in the signature of a T5 axiom | ||
| 610 | val role = axiom.objectPropertyExpressionsInSignature(0) | ||
| 611 | if (this.confl(role).contains(role)) { | ||
| 612 | Set( | ||
| 613 | RSA("v0_" ++ axiom.hashed), | ||
| 614 | RSA("v1_" ++ axiom.hashed) | ||
| 615 | ) | ||
| 616 | } else { | ||
| 617 | Set() | ||
| 618 | } | ||
| 619 | } | ||
| 620 | |||
| 621 | def cycle(axiom: OWLSubClassOfAxiom): Set[Term] = { | ||
| 622 | // TODO: we can actually use `toTriple` from `RSAAxiom` | ||
| 623 | val classes = | ||
| 624 | axiom.classesInSignature.collect(Collectors.toList()).asScala | ||
| 625 | val classA = classes(0) | ||
| 626 | val roleR = axiom | ||
| 627 | .objectPropertyExpressionsInSignature(0) | ||
| 628 | .asInstanceOf[OWLObjectProperty] | ||
| 629 | val classB = classes(1) | ||
| 630 | cycle_aux1(classA, roleR, classB) | ||
| 631 | } | ||
| 632 | |||
| 633 | def cycle_aux0( | ||
| 634 | classA: OWLClass, | ||
| 635 | roleR: OWLObjectProperty, | ||
| 636 | classB: OWLClass | ||
| 637 | ): Set[Term] = { | ||
| 638 | val conflR = this.confl(roleR) | ||
| 639 | val classes = ontology | ||
| 640 | .classesInSignature(Imports.INCLUDED) | ||
| 641 | .collect(Collectors.toSet()) | ||
| 642 | .asScala | ||
| 643 | for { | ||
| 644 | classD <- classes | ||
| 645 | roleS <- conflR | ||
| 646 | classC <- classes | ||
| 647 | // Keeping this check for now | ||
| 648 | if !unsafeRoles.contains(roleS) | ||
| 649 | tripleARB = RSAAxiom.hashed(classA, roleR, classB) | ||
| 650 | tripleDSC = RSAAxiom.hashed(classD, roleS, classC) | ||
| 651 | individual = | ||
| 652 | if (tripleARB > tripleDSC) { | ||
| 653 | RSA("v1_" ++ tripleDSC) | ||
| 654 | } else { | ||
| 655 | // Note that this is also the case for | ||
| 656 | // `tripleARB == tripleDSC` | ||
| 657 | RSA("v0_" ++ tripleDSC) | ||
| 658 | } | ||
| 659 | } yield individual | ||
| 660 | } | ||
| 661 | |||
| 662 | def cycle_aux1( | ||
| 663 | classA: OWLClass, | ||
| 664 | roleR: OWLObjectProperty, | ||
| 665 | classB: OWLClass | ||
| 666 | ): Set[Term] = { | ||
| 667 | val conflR = this.confl(roleR) | ||
| 668 | // We just need the TBox to find | ||
| 669 | val terms = for { | ||
| 670 | axiom1 <- tbox | ||
| 671 | if axiom1.isT5 | ||
| 672 | // We expect only one role coming out of a T5 axiom | ||
| 673 | roleS <- axiom1.objectPropertyExpressionsInSignature | ||
| 674 | // Triples ordering is among triples involving safe roles. | ||
| 675 | if !unsafeRoles.contains(roleS) | ||
| 676 | if conflR.contains(roleS) | ||
| 677 | tripleARB = RSAAxiom.hashed(classA, roleR, classB) | ||
| 678 | tripleDSC = axiom1.hashed | ||
| 679 | individual = | ||
| 680 | if (tripleARB > tripleDSC) { | ||
| 681 | RSA("v1_" ++ tripleDSC) | ||
| 682 | } else { | ||
| 683 | // Note that this is also the case for | ||
| 684 | // `tripleARB == tripleDSC` | ||
| 685 | RSA("v0_" ++ tripleDSC) | ||
| 686 | } | ||
| 687 | } yield individual | ||
| 688 | terms to Set | ||
| 689 | } | ||
| 690 | |||
| 691 | def unfold(axiom: OWLSubClassOfAxiom): Set[Term] = | ||
| 692 | this.self(axiom) | this.cycle(axiom) | ||
| 693 | |||
| 694 | /** Log normalization/approximation statistics */ | ||
| 695 | def statistics(level: Logger.Level = Logger.DEBUG): Unit = { | ||
| 696 | Logger.print( | ||
| 697 | s"Logical axioms in original input ontology: ${original.getLogicalAxiomCount(true)}", | ||
| 698 | level | ||
| 699 | ) | ||
| 700 | Logger.print( | ||
| 701 | s"Logical axioms discarded in Horn-ALCHOIQ approximation: ${normalizer.discarded}", | ||
| 702 | level | ||
| 703 | ) | ||
| 704 | Logger.print( | ||
| 705 | s"Logical axioms shifted in Horn-ALCHOIQ approximation: ${normalizer.shifted}", | ||
| 706 | level | ||
| 707 | ) | ||
| 708 | Logger.print( | ||
| 709 | s"Logical axioms in Horn-ALCHOIQ ontology: ${ontology | ||
| 710 | .getLogicalAxiomCount(true)} (${tbox.length}/${rbox.length}/${abox.length})", | ||
| 711 | level | ||
| 712 | ) | ||
| 713 | Logger.print( | ||
| 714 | s"Logical axioms discarded in RSA approximation: ${removed.length}", | ||
| 715 | level | ||
| 716 | ) | ||
| 717 | } | ||
| 718 | |||
| 719 | } // class RSAOntology | ||
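A quick, hedged sketch of how the generators relocated into `RSAUtil` behave: both draw from the same private counter, so fresh variables and fresh classes share one numbering sequence. The concrete names in the comments are illustrative and depend on how many fresh names were requested earlier in the run.

```scala
import uk.ac.ox.cs.rsacomb.RSAUtil

// Both generators bump the same counter, so names interleave across calls.
val v0 = RSAUtil.genFreshVariable()  // e.g. Variable "I00000"
val c1 = RSAUtil.getFreshOWLClass()  // e.g. OWLClass with IRI "X1"
val v2 = RSAUtil.genFreshVariable()  // e.g. Variable "I00002"
```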
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Approximation.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Approximation.scala
new file mode 100644
index 0000000..344f0fe
--- /dev/null
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Approximation.scala
| @@ -0,0 +1,18 @@ | |||
| 1 | package uk.ac.ox.cs.rsacomb.approximation | ||
| 2 | |||
| 3 | import java.io.File | ||
| 4 | import org.semanticweb.owlapi.model.OWLLogicalAxiom | ||
| 5 | |||
| 6 | import uk.ac.ox.cs.rsacomb.ontology.Ontology | ||
| 7 | |||
| 8 | /** Ontology approximation technique. */ | ||
| 9 | trait Approximation[T] { | ||
| 10 | |||
| 11 | /** Approximate an ontology. | ||
| 12 | * | ||
| 13 | * @param ontology input ontology as a list of axioms | ||
| 14 | * @return the approximated ontology | ||
| 15 | */ | ||
| 16 | def approximate(ontology: Ontology): T | ||
| 17 | |||
| 18 | } | ||
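`Approximation[T]` is parametric in its result type. As a purely illustrative instance (not part of this merge), a no-op approximation that returns the input unchanged could be written as below; `LowerBound`, introduced in the next file, is the real implementation added by this commit.

```scala
import uk.ac.ox.cs.rsacomb.approximation.Approximation
import uk.ac.ox.cs.rsacomb.ontology.Ontology

/** Hypothetical identity "approximation": returns the ontology as-is. */
class Identity extends Approximation[Ontology] {
  def approximate(ontology: Ontology): Ontology = ontology
}
```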
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala
new file mode 100644
index 0000000..60a88fb
--- /dev/null
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala
| @@ -0,0 +1,232 @@ | |||
| 1 | package uk.ac.ox.cs.rsacomb.approximation | ||
| 2 | |||
| 3 | import java.io.File | ||
| 4 | |||
| 5 | import org.semanticweb.owlapi.apibinding.OWLManager | ||
| 6 | import org.semanticweb.owlapi.model.{IRI => _, _} | ||
| 7 | |||
| 8 | import tech.oxfordsemantic.jrdfox.logic.expression.{Resource, IRI} | ||
| 9 | |||
| 10 | import scala.collection.mutable.{Set, Map} | ||
| 11 | import scalax.collection.Graph | ||
| 12 | import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._ | ||
| 13 | import scalax.collection.GraphTraversal._ | ||
| 14 | |||
| 15 | import uk.ac.ox.cs.rsacomb.RSAOntology | ||
| 16 | import uk.ac.ox.cs.rsacomb.RSAUtil | ||
| 17 | import uk.ac.ox.cs.rsacomb.ontology.Ontology | ||
| 18 | |||
| 19 | object LowerBound { | ||
| 20 | |||
| 21 | private val manager = OWLManager.createOWLOntologyManager() | ||
| 22 | private val factory = manager.getOWLDataFactory() | ||
| 23 | |||
| 24 | } | ||
| 25 | |||
| 26 | /** Approximation algorithm that maintains soundness for CQ answering. | ||
| 27 | * | ||
| 28 | * The input OWL 2 ontology is assumed to be normalized and the output | ||
| 29 | * ontology is guaranteed to be in RSA. | ||
| 30 | * | ||
| 31 | * The algorithm is performed in three steps: | ||
| 32 | * 1. the ontology is reduced to ALCHOIQ by discarding any axiom | ||
| 33 | * that is not in the language; | ||
| 34 | * 2. the ontology is further reduced to Horn-ALCHOIQ by shifting | ||
| 35 | * axioms with disjunction on the rhs; | ||
| 36 | * 3. the ontology is approximated to RSA by manipulating its | ||
| 37 | * dependency graph. | ||
| 38 | * | ||
| 39 | * @see [[uk.ac.ox.cs.rsacomb.converter.Normalizer]] | ||
| 40 | */ | ||
| 41 | class LowerBound extends Approximation[RSAOntology] { | ||
| 42 | |||
| 43 | /** Simplify conversion between Java and Scala collections */ | ||
| 44 | import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ | ||
| 45 | |||
| 46 | /** Simplify conversion between OWLAPI and RDFox concepts */ | ||
| 47 | import uk.ac.ox.cs.rsacomb.implicits.RDFox._ | ||
| 48 | |||
| 49 | /** Main entry point for the approximation algorithm */ | ||
| 50 | def approximate(ontology: Ontology): RSAOntology = | ||
| 51 | toRSA( | ||
| 52 | new Ontology( | ||
| 53 | ontology.axioms filter inALCHOIQ flatMap shift, | ||
| 54 | ontology.datafiles | ||
| 55 | ) | ||
| 56 | ) | ||
| 57 | |||
| 58 | /** Discards all axioms outside ALCHOIQ */ | ||
| 59 | private def inALCHOIQ(axiom: OWLLogicalAxiom): Boolean = | ||
| 60 | axiom match { | ||
| 61 | case a: OWLSubClassOfAxiom => { | ||
| 62 | val sub = a.getSubClass.getNNF | ||
| 63 | val sup = a.getSuperClass.getNNF | ||
| 64 | (sub, sup) match { | ||
| 65 | case (sub: OWLObjectAllValuesFrom, _) => false | ||
| 66 | case (sub: OWLDataAllValuesFrom, _) => false | ||
| 67 | case (_, sup: OWLDataAllValuesFrom) => false | ||
| 68 | case (sub: OWLObjectMinCardinality, _) if sub.getCardinality >= 2 => | ||
| 69 | false | ||
| 70 | case (sub: OWLDataMinCardinality, _) if sub.getCardinality >= 2 => | ||
| 71 | false | ||
| 72 | case (_, sup: OWLObjectMinCardinality) if sup.getCardinality >= 2 => | ||
| 73 | false | ||
| 74 | case (_, sup: OWLDataMinCardinality) if sup.getCardinality >= 2 => | ||
| 75 | false | ||
| 76 | case (sub: OWLObjectMaxCardinality, _) => false | ||
| 77 | case (sub: OWLDataMaxCardinality, _) => false | ||
| 78 | case (_, sup: OWLObjectMaxCardinality) if sup.getCardinality >= 2 => | ||
| 79 | false | ||
| 80 | case (_, sup: OWLDataMaxCardinality) if sup.getCardinality >= 1 => | ||
| 81 | false | ||
| 82 | case (_, sup: OWLObjectOneOf) if sup.getIndividuals.length > 2 => | ||
| 83 | false | ||
| 84 | case (sub: OWLObjectHasSelf, _) => false | ||
| 85 | case (_, sup: OWLObjectHasSelf) => false | ||
| 86 | case _ => true | ||
| 87 | } | ||
| 88 | } | ||
| 89 | case a: OWLTransitiveObjectPropertyAxiom => false | ||
| 90 | case a: OWLReflexiveObjectPropertyAxiom => false | ||
| 91 | case a: OWLSubPropertyChainOfAxiom => false | ||
| 92 | case a: OWLAsymmetricObjectPropertyAxiom => false | ||
| 93 | case a => true | ||
| 94 | } | ||
| 95 | |||
| 96 | /** Shifting axioms with disjunction on the rhs. | ||
| 97 | * | ||
| 98 | * The process of shifting preserves soundness, but completeness w.r.t. | ||
| 99 | * CQ answering is lost. | ||
| 100 | * | ||
| 101 | * @example | ||
| 102 | * | ||
| 103 | * A -> B1 u B2 u B3 . | ||
| 104 | * | ||
| 105 | * becomes | ||
| 106 | * | ||
| 107 | * A n nB1 n nB2 n nB3 -> bot . | ||
| 108 | * A n nB1 n nB2 -> B3 . | ||
| 109 | * A n nB1 n nB3 -> B2 . | ||
| 110 | * A n nB2 n nB3 -> B1 . | ||
| 111 | * nB1 n nB2 n nB3 -> nA . | ||
| 112 | * | ||
| 113 | * where nA, nB1, nB2, nB3 are fresh predicates "corresponding" to | ||
| 114 | * the negation of A, B1, B2, B3 respectively. | ||
| 115 | * | ||
| 116 | * @note this method maintains the normal form of the input axiom. | ||
| 117 | */ | ||
| 118 | private def shift(axiom: OWLLogicalAxiom): List[OWLLogicalAxiom] = | ||
| 119 | axiom match { | ||
| 120 | case a: OWLSubClassOfAxiom => { | ||
| 121 | val sub = a.getSubClass.getNNF | ||
| 122 | val sup = a.getSuperClass.getNNF | ||
| 123 | sup match { | ||
| 124 | case sup: OWLObjectUnionOf => { | ||
| 125 | val body = sub.asConjunctSet.map((atom) => | ||
| 126 | (atom, RSAUtil.getFreshOWLClass()) | ||
| 127 | ) | ||
| 128 | val head = sup.asDisjunctSet.map((atom) => | ||
| 129 | (atom, RSAUtil.getFreshOWLClass()) | ||
| 130 | ) | ||
| 131 | |||
| 132 | val r1 = | ||
| 133 | LowerBound.factory.getOWLSubClassOfAxiom( | ||
| 134 | LowerBound.factory.getOWLObjectIntersectionOf( | ||
| 135 | (body.map(_._1) ++ head.map(_._2)): _* | ||
| 136 | ), | ||
| 137 | LowerBound.factory.getOWLNothing | ||
| 138 | ) | ||
| 139 | |||
| 140 | val r2s = | ||
| 141 | for { | ||
| 142 | (a, na) <- head | ||
| 143 | hs = head.map(_._2).filterNot(_ equals na) | ||
| 144 | } yield LowerBound.factory.getOWLSubClassOfAxiom( | ||
| 145 | LowerBound.factory.getOWLObjectIntersectionOf( | ||
| 146 | (body.map(_._1) ++ hs): _* | ||
| 147 | ), | ||
| 148 | a | ||
| 149 | ) | ||
| 150 | |||
| 151 | val r3s = | ||
| 152 | for { | ||
| 153 | (a, na) <- body | ||
| 154 | bs = body.map(_._1).filterNot(_ equals a) | ||
| 155 | } yield LowerBound.factory.getOWLSubClassOfAxiom( | ||
| 156 | LowerBound.factory.getOWLObjectIntersectionOf( | ||
| 157 | (bs ++ head.map(_._2)): _* | ||
| 158 | ), | ||
| 159 | na | ||
| 160 | ) | ||
| 161 | |||
| 162 | List(r1) ++ r2s ++ r3s | ||
| 163 | } | ||
| 164 | case _ => List(axiom) | ||
| 165 | } | ||
| 166 | } | ||
| 167 | case _ => List(axiom) | ||
| 168 | } | ||
| 169 | |||
| 170 | /** Approximate a Horn-ALCHOIQ ontology to RSA | ||
| 171 | * | ||
| 172 | * This is done by gathering those axioms that prevent the ontology | ||
| 173 | * dependency graph from being tree-shaped, and removing them. | ||
| 174 | * | ||
| 175 | * @param ontology the set of axioms to approximate. | ||
| 176 | * @return the approximated RSA ontology | ||
| 177 | */ | ||
| 178 | private def toRSA(ontology: Ontology): RSAOntology = { | ||
| 179 | /* Compute the dependency graph for the ontology */ | ||
| 180 | val (graph, nodemap) = ontology.dependencyGraph | ||
| 181 | |||
| 182 | /* Define node colors for the graph visit */ | ||
| 183 | sealed trait NodeColor | ||
| 184 | case object Unvisited extends NodeColor | ||
| 185 | case object Visited extends NodeColor | ||
| 186 | case object ToDelete extends NodeColor | ||
| 187 | |||
| 188 | /* Keep track of node colors during graph visit */ | ||
| 189 | var color = Map.from[Resource, NodeColor]( | ||
| 190 | graph.nodes.toOuter.map(k => (k, Unvisited)) | ||
| 191 | ) | ||
| 192 | |||
| 193 | for { | ||
| 194 | component <- graph.componentTraverser().map(_ to Graph) | ||
| 195 | edge <- component | ||
| 196 | .outerEdgeTraverser(component.nodes.head) | ||
| 197 | .withKind(BreadthFirst) | ||
| 198 | } yield { | ||
| 199 | val source = edge._1 | ||
| 200 | val target = edge._2 | ||
| 201 | color(source) match { | ||
| 202 | case Unvisited | Visited => { | ||
| 203 | color(target) match { | ||
| 204 | case Unvisited => | ||
| 205 | color(source) = Visited; | ||
| 206 | color(target) = Visited | ||
| 207 | case Visited => | ||
| 208 | color(source) = ToDelete | ||
| 209 | case ToDelete => | ||
| 210 | color(source) = Visited | ||
| 211 | } | ||
| 212 | } | ||
| 213 | case ToDelete => | ||
| 214 | } | ||
| 215 | } | ||
| 216 | |||
| 217 | val toDelete = color.collect { case (resource: IRI, ToDelete) => | ||
| 218 | nodemap(resource.getIRI) | ||
| 219 | }.toList | ||
| 220 | |||
| 221 | /* Remove axioms from approximated ontology */ | ||
| 222 | RSAOntology(ontology.axioms diff toDelete, ontology.datafiles) | ||
| 223 | } | ||
| 224 | |||
| 225 | // val edges1 = Seq('A ~> 'B, 'B ~> 'C, 'C ~> 'D, 'D ~> 'H, 'H ~> | ||
| 226 | // 'G, 'G ~> 'F, 'E ~> 'A, 'E ~> 'F, 'B ~> 'E, 'F ~> 'G, 'B ~> 'F, | ||
| 227 | // 'C ~> 'G, 'D ~> 'C, 'H ~> 'D) | ||
| 228 | // val edges2 = Seq('I ~> 'M, 'I ~> 'L, 'L ~> 'N, 'M ~> 'N) | ||
| 229 | // val edges3 = Seq('P ~> 'O) | ||
| 230 | // val graph = Graph.from(edges = edges1 ++ edges2 ++ edges3) | ||
| 231 | // | ||
| 232 | } | ||
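To make the graph-colouring step concrete, here is a self-contained sketch that runs the same breadth-first colouring as `toRSA` over the toy graph left in the trailing comments above, using plain `Symbol` nodes in place of RDF resources. The nodes reported as `ToDelete` stand for the axioms the approximation would drop.

```scala
import scala.collection.mutable.Map
import scalax.collection.Graph
import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._
import scalax.collection.GraphTraversal._

// Toy dependency graph taken from the commented-out example above.
val edges1 = Seq('A ~> 'B, 'B ~> 'C, 'C ~> 'D, 'D ~> 'H, 'H ~> 'G,
                 'G ~> 'F, 'E ~> 'A, 'E ~> 'F, 'B ~> 'E, 'F ~> 'G,
                 'B ~> 'F, 'C ~> 'G, 'D ~> 'C, 'H ~> 'D)
val edges2 = Seq('I ~> 'M, 'I ~> 'L, 'L ~> 'N, 'M ~> 'N)
val edges3 = Seq('P ~> 'O)
val graph = Graph.from(edges = edges1 ++ edges2 ++ edges3)

sealed trait NodeColor
case object Unvisited extends NodeColor
case object Visited extends NodeColor
case object ToDelete extends NodeColor

// Same colouring pass as in `toRSA`, per connected component.
var color = Map.from[Symbol, NodeColor](
  graph.nodes.toOuter.map(k => (k, Unvisited))
)
for {
  component <- graph.componentTraverser().map(_ to Graph)
  edge <- component
    .outerEdgeTraverser(component.nodes.head)
    .withKind(BreadthFirst)
} yield {
  val source = edge._1
  val target = edge._2
  color(source) match {
    case Unvisited | Visited =>
      color(target) match {
        case Unvisited =>
          color(source) = Visited; color(target) = Visited
        case Visited  => color(source) = ToDelete
        case ToDelete => color(source) = Visited
      }
    case ToDelete =>
  }
}

// Nodes that would be deleted (i.e. their generating axioms removed).
println(color.collect { case (node, ToDelete) => node })
```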
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala
index 254fd13..285040e 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala
| @@ -20,6 +20,8 @@ import org.semanticweb.owlapi.apibinding.OWLManager | |||
| 20 | import org.semanticweb.owlapi.model._ | 20 | import org.semanticweb.owlapi.model._ |
| 21 | 21 | ||
| 22 | import uk.ac.ox.cs.rsacomb.util.Logger | 22 | import uk.ac.ox.cs.rsacomb.util.Logger |
| 23 | import uk.ac.ox.cs.rsacomb.RSAOntology | ||
| 24 | import uk.ac.ox.cs.rsacomb.RSAUtil | ||
| 23 | 25 | ||
| 24 | object Normalizer { | 26 | object Normalizer { |
| 25 | 27 | ||
| @@ -41,12 +43,6 @@ class Normalizer() { | |||
| 41 | /** Simplify conversion between Java and Scala collections */ | 43 | /** Simplify conversion between Java and Scala collections */ |
| 42 | import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ | 44 | import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ |
| 43 | 45 | ||
| 44 | private var counter = -1 | ||
| 45 | def freshOWLClass(): OWLClass = { | ||
| 46 | counter += 1 | ||
| 47 | factory.getOWLClass(s"X$counter") | ||
| 48 | } | ||
| 49 | |||
| 50 | /** Statistics */ | 46 | /** Statistics */ |
| 51 | var discarded = 0 | 47 | var discarded = 0 |
| 52 | var shifted = 0 | 48 | var shifted = 0 |
| @@ -74,7 +70,7 @@ class Normalizer() { | |||
| 74 | * C c D -> { C c X, X c D } | 70 | * C c D -> { C c X, X c D } |
| 75 | */ | 71 | */ |
| 76 | case _ if !sub.isOWLClass && !sup.isOWLClass => { | 72 | case _ if !sub.isOWLClass && !sup.isOWLClass => { |
| 77 | val cls = freshOWLClass() | 73 | val cls = RSAUtil.getFreshOWLClass() |
| 78 | Seq( | 74 | Seq( |
| 79 | factory.getOWLSubClassOfAxiom(sub, cls), | 75 | factory.getOWLSubClassOfAxiom(sub, cls), |
| 80 | factory.getOWLSubClassOfAxiom(cls, sup) | 76 | factory.getOWLSubClassOfAxiom(cls, sup) |
| @@ -95,7 +91,7 @@ class Normalizer() { | |||
| 95 | if (conj.isOWLClass) | 91 | if (conj.isOWLClass) |
| 96 | (acc1 :+ conj, acc2) | 92 | (acc1 :+ conj, acc2) |
| 97 | else { | 93 | else { |
| 98 | val cls = freshOWLClass() | 94 | val cls = RSAUtil.getFreshOWLClass() |
| 99 | ( | 95 | ( |
| 100 | acc1 :+ cls, | 96 | acc1 :+ cls, |
| 101 | acc2 :+ factory.getOWLSubClassOfAxiom(conj, cls) | 97 | acc2 :+ factory.getOWLSubClassOfAxiom(conj, cls) |
| @@ -154,7 +150,7 @@ class Normalizer() { | |||
| 154 | */ | 150 | */ |
| 155 | case (sub: OWLObjectSomeValuesFrom, _) | 151 | case (sub: OWLObjectSomeValuesFrom, _) |
| 156 | if !sub.getFiller.isOWLClass => { | 152 | if !sub.getFiller.isOWLClass => { |
| 157 | val cls = freshOWLClass() | 153 | val cls = RSAUtil.getFreshOWLClass() |
| 158 | Seq( | 154 | Seq( |
| 159 | factory.getOWLSubClassOfAxiom(sub.getFiller, cls), | 155 | factory.getOWLSubClassOfAxiom(sub.getFiller, cls), |
| 160 | factory.getOWLSubClassOfAxiom( | 156 | factory.getOWLSubClassOfAxiom( |
| @@ -169,7 +165,7 @@ class Normalizer() { | |||
| 169 | */ | 165 | */ |
| 170 | case (_, sup: OWLObjectSomeValuesFrom) | 166 | case (_, sup: OWLObjectSomeValuesFrom) |
| 171 | if !sup.getFiller.isOWLClass => { | 167 | if !sup.getFiller.isOWLClass => { |
| 172 | val cls = freshOWLClass() | 168 | val cls = RSAUtil.getFreshOWLClass() |
| 173 | Seq( | 169 | Seq( |
| 174 | factory.getOWLSubClassOfAxiom(cls, sup.getFiller), | 170 | factory.getOWLSubClassOfAxiom(cls, sup.getFiller), |
| 175 | factory.getOWLSubClassOfAxiom( | 171 | factory.getOWLSubClassOfAxiom( |
| @@ -314,7 +310,7 @@ class Normalizer() { | |||
| 314 | ) | 310 | ) |
| 315 | case (_, sup: OWLObjectMaxCardinality) | 311 | case (_, sup: OWLObjectMaxCardinality) |
| 316 | if sup.getCardinality == 1 && !sup.getFiller.isOWLClass => { | 312 | if sup.getCardinality == 1 && !sup.getFiller.isOWLClass => { |
| 317 | val cls = freshOWLClass() | 313 | val cls = RSAUtil.getFreshOWLClass() |
| 318 | Seq( | 314 | Seq( |
| 319 | factory.getOWLSubClassOfAxiom(cls, sup.getFiller), | 315 | factory.getOWLSubClassOfAxiom(cls, sup.getFiller), |
| 320 | factory.getOWLSubClassOfAxiom( | 316 | factory.getOWLSubClassOfAxiom( |
| @@ -504,7 +500,7 @@ class Normalizer() { | |||
| 504 | * C(a) -> { X(a), X c C } | 500 | * C(a) -> { X(a), X c C } |
| 505 | */ | 501 | */ |
| 506 | case a: OWLClassAssertionAxiom if !a.getClassExpression.isOWLClass => { | 502 | case a: OWLClassAssertionAxiom if !a.getClassExpression.isOWLClass => { |
| 507 | val cls = freshOWLClass() | 503 | val cls = RSAUtil.getFreshOWLClass() |
| 508 | Seq( | 504 | Seq( |
| 509 | factory.getOWLClassAssertionAxiom(cls, a.getIndividual), | 505 | factory.getOWLClassAssertionAxiom(cls, a.getIndividual), |
| 510 | factory.getOWLSubClassOfAxiom(cls, a.getClassExpression) | 506 | factory.getOWLSubClassOfAxiom(cls, a.getClassExpression) |
| @@ -548,8 +544,10 @@ class Normalizer() { | |||
| 548 | sub: OWLClassExpression, | 544 | sub: OWLClassExpression, |
| 549 | sup: OWLObjectUnionOf | 545 | sup: OWLObjectUnionOf |
| 550 | ): Seq[OWLLogicalAxiom] = { | 546 | ): Seq[OWLLogicalAxiom] = { |
| 551 | val body = sub.asConjunctSet.map((atom) => (atom, freshOWLClass())) | 547 | val body = |
| 552 | val head = sup.asDisjunctSet.map((atom) => (atom, freshOWLClass())) | 548 | sub.asConjunctSet.map((atom) => (atom, RSAUtil.getFreshOWLClass())) |
| 549 | val head = | ||
| 550 | sup.asDisjunctSet.map((atom) => (atom, RSAUtil.getFreshOWLClass())) | ||
| 553 | 551 | ||
| 554 | /* Update statistics */ | 552 | /* Update statistics */ |
| 555 | shifted += 1 | 553 | shifted += 1 |
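The fresh-class helper removed from Normalizer above now lives in RSAUtil, whose implementation is not part of this diff. A minimal sketch of such a centralised generator, modelled directly on the removed helper (the object name and member names here are assumptions, not the actual RSAUtil code):

import org.semanticweb.owlapi.apibinding.OWLManager
import org.semanticweb.owlapi.model.{OWLClass, OWLDataFactory}

object FreshOWLClassSketch {
  private val factory: OWLDataFactory = OWLManager.getOWLDataFactory()
  // Monotonically increasing counter, as in the helper removed from Normalizer.
  private var counter = -1

  /** Return a fresh OWL class, named so as not to clash with input entities. */
  def getFreshOWLClass(): OWLClass = {
    counter += 1
    factory.getOWLClass(s"X$counter")
  }
}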
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala index b892ff5..3aa3c5f 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala | |||
| @@ -27,6 +27,7 @@ import tech.oxfordsemantic.jrdfox.logic.datalog.{ | |||
| 27 | TupleTableAtom | 27 | TupleTableAtom |
| 28 | } | 28 | } |
| 29 | import tech.oxfordsemantic.jrdfox.logic.expression.{Term, IRI, FunctionCall} | 29 | import tech.oxfordsemantic.jrdfox.logic.expression.{Term, IRI, FunctionCall} |
| 30 | import uk.ac.ox.cs.rsacomb.RSAUtil | ||
| 30 | import uk.ac.ox.cs.rsacomb.RSAOntology | 31 | import uk.ac.ox.cs.rsacomb.RSAOntology |
| 31 | import uk.ac.ox.cs.rsacomb.suffix.{Empty, Inverse, RSASuffix} | 32 | import uk.ac.ox.cs.rsacomb.suffix.{Empty, Inverse, RSASuffix} |
| 32 | import uk.ac.ox.cs.rsacomb.util.{RSA, RDFoxUtil} | 33 | import uk.ac.ox.cs.rsacomb.util.{RSA, RDFoxUtil} |
| @@ -159,14 +160,14 @@ trait RDFoxConverter { | |||
| 159 | } | 160 | } |
| 160 | 161 | ||
| 161 | case a: OWLSubObjectPropertyOfAxiom => { | 162 | case a: OWLSubObjectPropertyOfAxiom => { |
| 162 | val term1 = RSAOntology.genFreshVariable() | 163 | val term1 = RSAUtil.genFreshVariable() |
| 163 | val body = convert(a.getSubProperty, term, term1, suffix) | 164 | val body = convert(a.getSubProperty, term, term1, suffix) |
| 164 | val head = convert(a.getSuperProperty, term, term1, suffix) | 165 | val head = convert(a.getSuperProperty, term, term1, suffix) |
| 165 | ResultR(List(Rule.create(head, body))) | 166 | ResultR(List(Rule.create(head, body))) |
| 166 | } | 167 | } |
| 167 | 168 | ||
| 168 | case a: OWLSubDataPropertyOfAxiom => { | 169 | case a: OWLSubDataPropertyOfAxiom => { |
| 169 | val term1 = RSAOntology.genFreshVariable() | 170 | val term1 = RSAUtil.genFreshVariable() |
| 170 | val body = convert(a.getSubProperty, term, term1, suffix) | 171 | val body = convert(a.getSubProperty, term, term1, suffix) |
| 171 | val head = convert(a.getSuperProperty, term, term1, suffix) | 172 | val head = convert(a.getSuperProperty, term, term1, suffix) |
| 172 | ResultR(List(Rule.create(head, body))) | 173 | ResultR(List(Rule.create(head, body))) |
| @@ -176,7 +177,7 @@ trait RDFoxConverter { | |||
| 176 | convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix) | 177 | convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix) |
| 177 | 178 | ||
| 178 | case a: OWLObjectPropertyRangeAxiom => { | 179 | case a: OWLObjectPropertyRangeAxiom => { |
| 179 | val term1 = RSAOntology.genFreshVariable() | 180 | val term1 = RSAUtil.genFreshVariable() |
| 180 | val (res, ext) = convert(a.getRange, term, unsafe, skolem, suffix) | 181 | val (res, ext) = convert(a.getRange, term, unsafe, skolem, suffix) |
| 181 | val prop = convert(a.getProperty, term1, term, suffix) | 182 | val prop = convert(a.getProperty, term1, term, suffix) |
| 182 | ResultR(List(Rule.create(res, prop :: ext))) | 183 | ResultR(List(Rule.create(res, prop :: ext))) |
| @@ -343,7 +344,7 @@ trait RDFoxConverter { | |||
| 343 | case e: OWLObjectSomeValuesFrom => { | 344 | case e: OWLObjectSomeValuesFrom => { |
| 344 | val cls = e.getFiller() | 345 | val cls = e.getFiller() |
| 345 | val role = e.getProperty() | 346 | val role = e.getProperty() |
| 346 | val varX = RSAOntology.genFreshVariable | 347 | val varX = RSAUtil.genFreshVariable |
| 347 | val (bind, term1) = skolem match { | 348 | val (bind, term1) = skolem match { |
| 348 | case NoSkolem => (None, varX) | 349 | case NoSkolem => (None, varX) |
| 349 | case c: Constant => (None, c.iri) | 350 | case c: Constant => (None, c.iri) |
| @@ -370,7 +371,7 @@ trait RDFoxConverter { | |||
| 370 | // Computes the result of rule skolemization. Depending on the used | 371 | // Computes the result of rule skolemization. Depending on the used |
| 371 | // technique it might involve the introduction of additional atoms, | 372 | // technique it might involve the introduction of additional atoms, |
| 372 | // and/or fresh constants and variables. | 373 | // and/or fresh constants and variables. |
| 373 | val varX = RSAOntology.genFreshVariable | 374 | val varX = RSAUtil.genFreshVariable |
| 374 | val (bind, term1) = skolem match { | 375 | val (bind, term1) = skolem match { |
| 375 | case NoSkolem => (None, varX) | 376 | case NoSkolem => (None, varX) |
| 376 | case c: Constant => (None, c.iri) | 377 | case c: Constant => (None, c.iri) |
| @@ -395,7 +396,7 @@ trait RDFoxConverter { | |||
| 395 | s"Class expression '$e' has cardinality restriction != 1." | 396 | s"Class expression '$e' has cardinality restriction != 1." |
| 396 | ) | 397 | ) |
| 397 | val vars @ (y :: z :: _) = | 398 | val vars @ (y :: z :: _) = |
| 398 | Seq(RSAOntology.genFreshVariable(), RSAOntology.genFreshVariable()) | 399 | Seq(RSAUtil.genFreshVariable(), RSAUtil.genFreshVariable()) |
| 399 | val cls = e.getFiller | 400 | val cls = e.getFiller |
| 400 | val role = e.getProperty | 401 | val role = e.getProperty |
| 401 | val (res, ext) = vars.map(convert(cls, _, unsafe, skolem, suffix)).unzip | 402 | val (res, ext) = vars.map(convert(cls, _, unsafe, skolem, suffix)).unzip |
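RDFoxConverter (and RSAAtom below) now also obtain fresh datalog variables from RSAUtil. Again, the implementation is outside this diff; a possible sketch under the assumption of a simple counter-based scheme (the "I&lt;n&gt;" naming is purely illustrative):

import tech.oxfordsemantic.jrdfox.logic.expression.Variable

object FreshVariableSketch {
  // Counter shared across all generated variables.
  private var counter = -1

  /** Return a fresh variable for use in generated rules. */
  def genFreshVariable(): Variable = {
    counter += 1
    Variable.create(s"I$counter")
  }
}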
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala index ea87782..09bfa1e 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala | |||
| @@ -25,6 +25,7 @@ import tech.oxfordsemantic.jrdfox.logic.datalog.{ | |||
| 25 | } | 25 | } |
| 26 | import tech.oxfordsemantic.jrdfox.logic.expression.{IRI} | 26 | import tech.oxfordsemantic.jrdfox.logic.expression.{IRI} |
| 27 | 27 | ||
| 28 | import uk.ac.ox.cs.rsacomb.RSAUtil | ||
| 28 | import uk.ac.ox.cs.rsacomb.RSAOntology | 29 | import uk.ac.ox.cs.rsacomb.RSAOntology |
| 29 | import uk.ac.ox.cs.rsacomb.suffix.{RSASuffix, Nth} | 30 | import uk.ac.ox.cs.rsacomb.suffix.{RSASuffix, Nth} |
| 30 | import uk.ac.ox.cs.rsacomb.util.RDFoxUtil | 31 | import uk.ac.ox.cs.rsacomb.util.RDFoxUtil |
| @@ -94,7 +95,7 @@ object RSAAtom { | |||
| 94 | if (isRDF) { | 95 | if (isRDF) { |
| 95 | (None, List(atom)) | 96 | (None, List(atom)) |
| 96 | } else { | 97 | } else { |
| 97 | val varS = RSAOntology.genFreshVariable() | 98 | val varS = RSAUtil.genFreshVariable() |
| 98 | val skolem = RDFoxUtil.skolem(name, (args :+ varS): _*) | 99 | val skolem = RDFoxUtil.skolem(name, (args :+ varS): _*) |
| 99 | val atom = TupleTableAtom.rdf(varS, IRI.RDF_TYPE, name) | 100 | val atom = TupleTableAtom.rdf(varS, IRI.RDF_TYPE, name) |
| 100 | val atoms = args.zipWithIndex | 101 | val atoms = args.zipWithIndex |
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala new file mode 100644 index 0000000..ba44605 --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala | |||
| @@ -0,0 +1,301 @@ | |||
| 1 | /* | ||
| 2 | * Copyright 2020, 2021 KRR Oxford | ||
| 3 | * | ||
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | * you may not use this file except in compliance with the License. | ||
| 6 | * You may obtain a copy of the License at | ||
| 7 | * | ||
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | * | ||
| 10 | * Unless required by applicable law or agreed to in writing, software | ||
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | * See the License for the specific language governing permissions and | ||
| 14 | * limitations under the License. | ||
| 15 | */ | ||
| 16 | |||
| 17 | package uk.ac.ox.cs.rsacomb.ontology | ||
| 18 | |||
| 19 | import java.io.File | ||
| 20 | import java.util.stream.Collectors | ||
| 21 | |||
| 22 | import scala.collection.mutable.Map | ||
| 23 | import scala.collection.JavaConverters._ | ||
| 24 | import scalax.collection.Graph | ||
| 25 | import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._ | ||
| 26 | |||
| 27 | import org.semanticweb.owlapi.model.parameters.Imports | ||
| 28 | import org.semanticweb.owlapi.apibinding.OWLManager | ||
| 29 | import org.semanticweb.owlapi.model.{OWLOntology, OWLAxiom, OWLLogicalAxiom} | ||
| 30 | import org.semanticweb.owlapi.model.{OWLObjectPropertyExpression} | ||
| 31 | import org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory | ||
| 32 | import tech.oxfordsemantic.jrdfox.logic.datalog.Rule | ||
| 33 | import tech.oxfordsemantic.jrdfox.logic.expression.{Resource, Term, Variable} | ||
| 34 | |||
| 35 | import uk.ac.ox.cs.rsacomb.approximation.Approximation | ||
| 36 | import uk.ac.ox.cs.rsacomb.converter._ | ||
| 37 | import uk.ac.ox.cs.rsacomb.suffix._ | ||
| 38 | import uk.ac.ox.cs.rsacomb.util.{RDFoxUtil, RSA} | ||
| 39 | |||
| 40 | import uk.ac.ox.cs.rsacomb.RSAUtil | ||
| 41 | |||
| 42 | object Ontology { | ||
| 43 | |||
| 44 | /** Simplify conversion between Java and Scala collections */ | ||
| 45 | import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ | ||
| 46 | |||
| 47 | /** Type wrapper representing a dependency graph for the ontology. | ||
| 48 | * | ||
| 49 | * The graph is returned along with a map associating each node (IRI | ||
| 50 | * string of the resource) with the corresponding axiom in the | ||
| 51 | * original TBox. | ||
| 52 | */ | ||
| 53 | type DependencyGraph = (Graph[Resource, DiEdge], Map[String, OWLAxiom]) | ||
| 54 | |||
| 55 | /** Manager instance to interface with OWLAPI | ||
| 56 | * | ||
| 57 | * TODO: turn this into an implicit class parameter. | ||
| 58 | */ | ||
| 59 | val manager = OWLManager.createOWLOntologyManager() | ||
| 60 | |||
| 61 | /** Compute the RSA dependency graph for a set of axioms | ||
| 62 | * | ||
| 63 | * @param axioms set of input axioms (TBox) to build the dependency | ||
| 64 | * graph. | ||
| 65 | * @param datafiles data (ABox) to build the dependency graph. | ||
| 66 | * @param unsafe list of unsafe roles in the TBox. | ||
| 67 | * | ||
| 68 | * @return a tuple containing the dependency graph and a map between | ||
| 69 | * the newly introduced constants and the corresponding input axioms. | ||
| 70 | * | ||
| 71 | * @note no check on the ontology language is performed since the | ||
| 72 | * dependency graph can be computed regardless. The | ||
| 73 | * input axioms are assumed to be normalized. | ||
| 74 | */ | ||
| 75 | def dependencyGraph( | ||
| 76 | axioms: List[OWLLogicalAxiom], | ||
| 77 | datafiles: List[File], | ||
| 78 | unsafe: List[OWLObjectPropertyExpression] | ||
| 79 | ): DependencyGraph = { | ||
| 80 | |||
| 81 | var nodemap = Map.empty[String, OWLAxiom] | ||
| 82 | |||
| 83 | /* Create custom converter */ | ||
| 84 | object RSAConverter extends RDFoxConverter { | ||
| 85 | |||
| 86 | import org.semanticweb.owlapi.model.{ | ||
| 87 | OWLClassExpression, | ||
| 88 | OWLObjectSomeValuesFrom, | ||
| 89 | OWLDataSomeValuesFrom | ||
| 90 | } | ||
| 91 | |||
| 92 | override def convert( | ||
| 93 | expr: OWLClassExpression, | ||
| 94 | term: Term, | ||
| 95 | unsafe: List[OWLObjectPropertyExpression], | ||
| 96 | skolem: SkolemStrategy, | ||
| 97 | suffix: RSASuffix | ||
| 98 | ): Shards = | ||
| 99 | (expr, skolem) match { | ||
| 100 | |||
| 101 | case (e: OWLObjectSomeValuesFrom, c: Constant) => { | ||
| 102 | nodemap.update(c.iri.getIRI, c.axiom) | ||
| 103 | val (res, ext) = super.convert(e, term, unsafe, skolem, suffix) | ||
| 104 | if (unsafe contains e.getProperty) | ||
| 105 | (RSA.PE(term, c.iri) :: RSA.U(c.iri) :: res, ext) | ||
| 106 | else | ||
| 107 | (RSA.PE(term, c.iri) :: res, ext) | ||
| 108 | } | ||
| 109 | |||
| 110 | case (e: OWLDataSomeValuesFrom, c: Constant) => { | ||
| 111 | nodemap.update(c.iri.getIRI, c.axiom) | ||
| 112 | val (res, ext) = super.convert(e, term, unsafe, skolem, suffix) | ||
| 113 | if (unsafe contains e.getProperty) | ||
| 114 | (RSA.PE(term, c.iri) :: RSA.U(c.iri) :: res, ext) | ||
| 115 | else | ||
| 116 | (RSA.PE(term, c.iri) :: res, ext) | ||
| 117 | } | ||
| 118 | |||
| 119 | case _ => super.convert(expr, term, unsafe, skolem, suffix) | ||
| 120 | } | ||
| 121 | } | ||
| 122 | |||
| 123 | /* Ontology conversion into LP rules */ | ||
| 124 | val term = RSAUtil.genFreshVariable() | ||
| 125 | val result = axioms.map(a => | ||
| 126 | RSAConverter.convert(a, term, unsafe, new Constant(a), Empty) | ||
| 127 | ) | ||
| 128 | |||
| 129 | val datalog = result.unzip | ||
| 130 | val facts = datalog._1.flatten | ||
| 131 | var rules = datalog._2.flatten | ||
| 132 | |||
| 133 | /* Open connection with RDFox */ | ||
| 134 | val (server, data) = RDFoxUtil.openConnection("rsa_dependency_graph") | ||
| 135 | |||
| 136 | /* Add additional built-in rules */ | ||
| 137 | val varX = Variable.create("X") | ||
| 138 | val varY = Variable.create("Y") | ||
| 139 | rules = Rule.create( | ||
| 140 | RSA.E(varX, varY), | ||
| 141 | RSA.PE(varX, varY), | ||
| 142 | RSA.U(varX), | ||
| 143 | RSA.U(varY) | ||
| 144 | ) :: rules | ||
| 145 | /* Load facts and rules from ontology */ | ||
| 146 | RDFoxUtil.addFacts(data, facts) | ||
| 147 | RDFoxUtil.addRules(data, rules) | ||
| 148 | /* Load data files */ | ||
| 149 | RDFoxUtil.addData(data, datafiles: _*) | ||
| 150 | |||
| 151 | /* Build the graph */ | ||
| 152 | val query = "SELECT ?X ?Y WHERE { ?X rsa:E ?Y }" | ||
| 153 | val answers = RDFoxUtil.submitQuery(data, query, RSA.Prefixes).get | ||
| 154 | val edges: Seq[DiEdge[Resource]] = | ||
| 155 | answers.collect { case (_, Seq(n1, n2)) => n1 ~> n2 } | ||
| 156 | val graph = Graph(edges: _*) | ||
| 157 | |||
| 158 | /* Close connection to RDFox */ | ||
| 159 | RDFoxUtil.closeConnection(server, data) | ||
| 160 | |||
| 161 | (graph, nodemap) | ||
| 162 | } | ||
| 163 | |||
| 164 | def apply(axioms: List[OWLLogicalAxiom], datafiles: List[File]): Ontology = | ||
| 165 | new Ontology(axioms, datafiles) | ||
| 166 | |||
| 167 | def apply(ontology: OWLOntology, datafiles: List[File]): Ontology = { | ||
| 168 | |||
| 169 | /** TBox axioms */ | ||
| 170 | val tbox: List[OWLLogicalAxiom] = | ||
| 171 | ontology | ||
| 172 | .tboxAxioms(Imports.INCLUDED) | ||
| 173 | .collect(Collectors.toList()) | ||
| 174 | .collect { case a: OWLLogicalAxiom => a } | ||
| 175 | |||
| 176 | /** RBox axioms */ | ||
| 177 | val rbox: List[OWLLogicalAxiom] = | ||
| 178 | ontology | ||
| 179 | .rboxAxioms(Imports.INCLUDED) | ||
| 180 | .collect(Collectors.toList()) | ||
| 181 | .collect { case a: OWLLogicalAxiom => a } | ||
| 182 | |||
| 183 | /** ABox axioms | ||
| 184 | * | ||
| 185 | * @note this represents only the set of assertions contained in the | ||
| 186 | * ontology file. Data files specified in `datafiles` are directly | ||
| 187 | * imported into RDFox due to performance issues when trying to import | ||
| 188 | * large data files via OWLAPI. | ||
| 189 | */ | ||
| 190 | val abox: List[OWLLogicalAxiom] = | ||
| 191 | ontology | ||
| 192 | .aboxAxioms(Imports.INCLUDED) | ||
| 193 | .collect(Collectors.toList()) | ||
| 194 | .collect { case a: OWLLogicalAxiom => a } | ||
| 195 | |||
| 196 | Ontology(abox ::: tbox ::: rbox, datafiles) | ||
| 197 | } | ||
| 198 | |||
| 199 | def apply(ontofile: File, datafiles: List[File]): Ontology = { | ||
| 200 | val ontology = manager.loadOntologyFromOntologyDocument(ontofile) | ||
| 201 | Ontology(ontology, datafiles) | ||
| 202 | } | ||
| 203 | |||
| 204 | } | ||
| 205 | |||
| 206 | /** A wrapper for a generic OWL2 ontology | ||
| 207 | * | ||
| 208 | * @param axioms list of axioms (roughly) corresponding to the TBox. | ||
| 209 | * @param datafiles files containing ABox data. | ||
| 210 | */ | ||
| 211 | class Ontology(val axioms: List[OWLLogicalAxiom], val datafiles: List[File]) { | ||
| 212 | |||
| 213 | /** Extend OWLAxiom functionalities */ | ||
| 214 | import uk.ac.ox.cs.rsacomb.implicits.RSAAxiom._ | ||
| 215 | |||
| 216 | /** Simplify conversion between Java and Scala collections */ | ||
| 217 | import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ | ||
| 218 | |||
| 219 | println(s"Axioms: ${axioms.length}") | ||
| 220 | |||
| 221 | /** OWLOntology based on input axioms | ||
| 222 | * | ||
| 223 | * This is mainly used to instantiate a new reasoner to be used in | ||
| 224 | * the computation of unsafe roles. | ||
| 225 | */ | ||
| 226 | protected val ontology: OWLOntology = | ||
| 227 | Ontology.manager.createOntology((axioms: List[OWLAxiom]).asJava) | ||
| 228 | |||
| 229 | /** OWLAPI internal reasoner for ontology */ | ||
| 230 | protected val reasoner = | ||
| 231 | (new StructuralReasonerFactory()).createReasoner(ontology) | ||
| 232 | |||
| 233 | /** Unsafe roles in the ontology | ||
| 234 | * | ||
| 235 | * Unsafety conditions are the following: | ||
| 236 | * | ||
| 237 | * 1) For all roles r1 appearing in an axiom of type T5, r1 is unsafe | ||
| 238 | * if there exists a role r2 (different from top) appearing in an | ||
| 239 | * axiom of type T3 and r1 is a subproperty of the inverse of r2. | ||
| 240 | * | ||
| 241 | * 2) For all roles r1 appearing in an axiom of type T5, r1 is unsafe | ||
| 242 | * if there exists a role r2 appearing in an axiom of type T4 and | ||
| 243 | * r1 is a subproperty of either r2 or the inverse of r2. | ||
| 244 | */ | ||
| 245 | lazy val unsafe: List[OWLObjectPropertyExpression] = { | ||
| 246 | |||
| 247 | /* Checking for unsafety condition (1) */ | ||
| 248 | val unsafe1 = for { | ||
| 249 | axiom <- axioms | ||
| 250 | if axiom.isT5 | ||
| 251 | role1 <- axiom.objectPropertyExpressionsInSignature | ||
| 252 | roleSuper = role1 +: reasoner.superObjectProperties(role1) | ||
| 253 | axiom <- axioms | ||
| 254 | if axiom.isT3 && !axiom.isT3top | ||
| 255 | role2 <- axiom.objectPropertyExpressionsInSignature | ||
| 256 | if roleSuper contains role2.getInverseProperty | ||
| 257 | } yield role1 | ||
| 258 | |||
| 259 | /* Checking for unsafety condition (2) */ | ||
| 260 | val unsafe2 = for { | ||
| 261 | axiom <- axioms | ||
| 262 | if axiom.isT5 | ||
| 263 | role1 <- axiom.objectPropertyExpressionsInSignature | ||
| 264 | roleSuper = role1 +: reasoner.superObjectProperties(role1) | ||
| 265 | axiom <- axioms | ||
| 266 | if axiom.isT4 | ||
| 267 | role2 <- axiom.objectPropertyExpressionsInSignature | ||
| 268 | if roleSuper.contains(role2) || | ||
| 269 | roleSuper.contains(role2.getInverseProperty) | ||
| 270 | } yield role1 | ||
| 271 | |||
| 272 | unsafe1 ++ unsafe2 | ||
| 273 | } | ||
| 274 | |||
| 275 | /** Compute the dependency graph for the ontology */ | ||
| 276 | lazy val dependencyGraph: Ontology.DependencyGraph = | ||
| 277 | Ontology.dependencyGraph(axioms, datafiles, this.unsafe) | ||
| 278 | |||
| 279 | /** RSA check (not yet implemented) */ | ||
| 280 | lazy val isRSA: Boolean = ??? | ||
| 281 | |||
| 282 | /** Normalize the ontology according to the given normalizer | ||
| 283 | * | ||
| 284 | * @param normalizer the normalization technique to be used. | ||
| 285 | * @return a new normalized [[Ontology]]. | ||
| 286 | */ | ||
| 287 | def normalize(normalizer: Normalizer): Ontology = | ||
| 288 | new Ontology( | ||
| 289 | axioms flatMap normalizer.normalize, | ||
| 290 | datafiles | ||
| 291 | ) | ||
| 292 | |||
| 293 | /** Approximate the ontology according to the given approximation | ||
| 294 | * technique. | ||
| 295 | * | ||
| 296 | * @param approximation the approximation to be used on the ontology. | ||
| 297 | * @return the result of the approximation. | ||
| 298 | */ | ||
| 299 | def approximate[T](approximation: Approximation[T]): T = | ||
| 300 | approximation.approximate(this) | ||
| 301 | } | ||
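To tie the pieces of the new Ontology wrapper together, a minimal usage sketch: load an ontology, normalize it, and inspect the lazily computed unsafe roles and dependency graph. File paths are illustrative, and any approximation step is omitted here.

import java.io.File

import uk.ac.ox.cs.rsacomb.ontology.Ontology
import uk.ac.ox.cs.rsacomb.converter.Normalizer

object OntologyUsageSketch extends App {
  // Illustrative input files: an OWL ontology plus additional ABox data.
  val ontofile = new File("ontology.owl")
  val datafiles = List(new File("data.ttl"))

  // Load the ontology and rewrite its axioms into normal form.
  val ontology = Ontology(ontofile, datafiles).normalize(new Normalizer)

  // Unsafe roles and the dependency graph are computed on first access.
  println(s"Unsafe roles: ${ontology.unsafe.length}")
  val (graph, nodemap) = ontology.dependencyGraph
  println(s"Dependency graph: ${graph.nodes.size} nodes, ${graph.edges.size} edges")
}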
