From 30b199205f3a16cc92577393ddf9b5d7b36d9dc3 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Tue, 27 Jul 2021 12:52:48 +0100 Subject: Add skeleton for upperbound computation --- .../ox/cs/rsacomb/approximation/Upperbound.scala | 183 +++++++++++++++++++++ 1 file changed, 183 insertions(+) create mode 100644 src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala new file mode 100644 index 0000000..ba26113 --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala @@ -0,0 +1,183 @@ +package uk.ac.ox.cs.rsacomb.approximation + +// import java.io.File + +import org.semanticweb.owlapi.apibinding.OWLManager +import org.semanticweb.owlapi.model.{IRI => _, _} + +// import tech.oxfordsemantic.jrdfox.logic.expression.{Resource, IRI} + +// import scala.collection.mutable.{Set, Map} +// import scalax.collection.Graph +// import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._ +// import scalax.collection.GraphTraversal._ + +import uk.ac.ox.cs.rsacomb.RSAOntology +// import uk.ac.ox.cs.rsacomb.RSAUtil +import uk.ac.ox.cs.rsacomb.ontology.Ontology + +object UpperBound { + + private val manager = OWLManager.createOWLOntologyManager() + private val factory = manager.getOWLDataFactory() + +} + +/** Approximation algorithm that mantains completeness for CQ answering. + * + * The input OWL 2 ontology is assumed to be normalized and the output + * ontology is guaranteed to be in RSA. + * + * The algorithm is performed in three steps: + * 1. the ontology is reduced to ALCHOIQ by discarding any axiom + * that is not in the language; + * 2. the ontology is further reduced to Horn-ALCHOIQ by shifting + * axioms with disjunction on the rhs; + * 3. the ontology is approximated to RSA by manipulating its + * dependency graph. 
+ * + * @see [[uk.ac.ox.cs.rsacomb.converter.Normalizer]] + */ +class UpperBound extends Approximation[RSAOntology] { + + /** Simplify conversion between Java and Scala collections */ + import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ + + /** Simplify conversion between OWLAPI and RDFox concepts */ + // import uk.ac.ox.cs.rsacomb.implicits.RDFox._ + + /** Main entry point for the approximation algorithm */ + def approximate(ontology: Ontology): RSAOntology = + toRSA( + new Ontology( + ontology.axioms filter inALCHOIQ flatMap toConjuncts, + ontology.datafiles + ) + ) + + /** Discards all axioms outside ALCHOIQ */ + private def inALCHOIQ(axiom: OWLLogicalAxiom): Boolean = + axiom match { + case a: OWLSubClassOfAxiom => { + val sub = a.getSubClass.getNNF + val sup = a.getSuperClass.getNNF + (sub, sup) match { + case (sub: OWLObjectAllValuesFrom, _) => false + case (sub: OWLDataAllValuesFrom, _) => false + case (_, sup: OWLDataAllValuesFrom) => false + case (sub: OWLObjectMinCardinality, _) if sub.getCardinality >= 2 => + false + case (sub: OWLDataMinCardinality, _) if sub.getCardinality >= 2 => + false + case (_, sup: OWLObjectMinCardinality) if sup.getCardinality >= 2 => + false + case (_, sup: OWLDataMinCardinality) if sup.getCardinality >= 2 => + false + case (sub: OWLObjectMaxCardinality, _) => false + case (sub: OWLDataMaxCardinality, _) => false + case (_, sup: OWLObjectMaxCardinality) if sup.getCardinality >= 2 => + false + case (_, sup: OWLDataMaxCardinality) if sup.getCardinality >= 1 => + false + case (_, sup: OWLObjectOneOf) if sup.getIndividuals.length > 2 => + false + case (sub: OWLObjectHasSelf, _) => false + case (_, sup: OWLObjectHasSelf) => false + case _ => true + } + } + case a: OWLTransitiveObjectPropertyAxiom => false + case a: OWLReflexiveObjectPropertyAxiom => false + case a: OWLSubPropertyChainOfAxiom => false + case a: OWLAsymmetricObjectPropertyAxiom => false + case a => true + } + + /** Turn disjuncts into conjuncts + * + * This is a 
very naive way of getting rid of disjunction preserving + * completeness of CQ answering. + * + * @todo implement a choice function that decides which disjunct to + * keep instead of keeping all of them. Note that PAGOdA is currently + * doing something similar. + */ + private def toConjuncts(axiom: OWLLogicalAxiom): List[OWLLogicalAxiom] = + axiom match { + case a: OWLSubClassOfAxiom => { + val sub = a.getSubClass.getNNF + val sup = a.getSuperClass.getNNF + sup match { + case sup: OWLObjectUnionOf => + sup.asDisjunctSet.map( + UpperBound.factory.getOWLSubClassOfAxiom(sub, _) + ) + case _ => List(axiom) + } + } + case _ => List(axiom) + } + + // /** Approximate a Horn-ALCHOIQ ontology to RSA + // * + // * This is done by gathering those axioms that prevent the ontology + // * dependency graph from being tree-shaped, and removing them. + // * + // * @param ontology the set of axioms to approximate. + // * @return the approximated RSA ontology + // */ + // private def toRSA(ontology: Ontology): RSAOntology = { + // /* Compute the dependency graph for the ontology */ + // val (graph, nodemap) = ontology.dependencyGraph + + // /* Define node colors for the graph visit */ + // sealed trait NodeColor + // case object Unvisited extends NodeColor + // case object Visited extends NodeColor + // case object ToDelete extends NodeColor + + // /* Keep track of node colors during graph visit */ + // var color = Map.from[Resource, NodeColor]( + // graph.nodes.toOuter.map(k => (k, Unvisited)) + // ) + + // for { + // component <- graph.componentTraverser().map(_ to Graph) + // edge <- component + // .outerEdgeTraverser(component.nodes.head) + // .withKind(BreadthFirst) + // } yield { + // val source = edge._1 + // val target = edge._2 + // color(source) match { + // case Unvisited | Visited => { + // color(target) match { + // case Unvisited => + // color(source) = Visited; + // color(target) = Visited + // case Visited => + // color(source) = ToDelete + // case ToDelete => + // 
color(source) = Visited + // } + // } + // case ToDelete => + // } + // } + + // val toDelete = color.collect { case (resource: IRI, ToDelete) => + // nodemap(resource.getIRI) + // }.toList + + // /* Remove axioms from approximated ontology */ + // RSAOntology(ontology.axioms diff toDelete, ontology.datafiles) + // } + + // val edges1 = Seq('A ~> 'B, 'B ~> 'C, 'C ~> 'D, 'D ~> 'H, 'H ~> + // 'G, 'G ~> 'F, 'E ~> 'A, 'E ~> 'F, 'B ~> 'E, 'F ~> 'G, 'B ~> 'F, + // 'C ~> 'G, 'D ~> 'C, 'H ~> 'D) + // val edges2 = Seq('I ~> 'M, 'I ~> 'L, 'L ~> 'N, 'M ~> 'N) + // val edges3 = Seq('P ~> 'O) + // val graph = Graph.from(edges = edges1 ++ edges2 ++ edges3) + // +} -- cgit v1.2.3 From cbfa987d5c8d2f550d509c0a3d8226f302df476a Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Thu, 29 Jul 2021 11:54:44 +0100 Subject: Change spelling: LowerBound to Lowerbound --- src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala | 4 ++-- .../uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala | 18 +++++++++--------- 2 files changed, 11 insertions(+), 11 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala index 258c226..4c63e17 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala @@ -28,7 +28,7 @@ import sparql.ConjunctiveQuery import uk.ac.ox.cs.rsacomb.ontology.Ontology import uk.ac.ox.cs.rsacomb.converter.Normalizer -import uk.ac.ox.cs.rsacomb.approximation.LowerBound +import uk.ac.ox.cs.rsacomb.approximation.Lowerbound case class RSAOption[+T](opt: T) { def get[T]: T = opt.asInstanceOf[T] @@ -133,7 +133,7 @@ object RSAComb extends App { ).normalize(new Normalizer) /* Approximate the ontology to RSA */ - val toRSA = new LowerBound + val toRSA = new Lowerbound val rsa = ontology approximate toRSA if (config contains 'query) { diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala 
b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala index 60a88fb..290cbaf 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala @@ -16,7 +16,7 @@ import uk.ac.ox.cs.rsacomb.RSAOntology import uk.ac.ox.cs.rsacomb.RSAUtil import uk.ac.ox.cs.rsacomb.ontology.Ontology -object LowerBound { +object Lowerbound { private val manager = OWLManager.createOWLOntologyManager() private val factory = manager.getOWLDataFactory() @@ -38,7 +38,7 @@ object LowerBound { * * @see [[uk.ac.ox.cs.rsacomb.converter.Normalizer]] */ -class LowerBound extends Approximation[RSAOntology] { +class Lowerbound extends Approximation[RSAOntology] { /** Simplify conversion between Java and Scala collections */ import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ @@ -130,19 +130,19 @@ class LowerBound extends Approximation[RSAOntology] { ) val r1 = - LowerBound.factory.getOWLSubClassOfAxiom( - LowerBound.factory.getOWLObjectIntersectionOf( + Lowerbound.factory.getOWLSubClassOfAxiom( + Lowerbound.factory.getOWLObjectIntersectionOf( (body.map(_._1) ++ head.map(_._2)): _* ), - LowerBound.factory.getOWLNothing + Lowerbound.factory.getOWLNothing ) val r2s = for { (a, na) <- head hs = head.map(_._2).filterNot(_ equals na) - } yield LowerBound.factory.getOWLSubClassOfAxiom( - LowerBound.factory.getOWLObjectIntersectionOf( + } yield Lowerbound.factory.getOWLSubClassOfAxiom( + Lowerbound.factory.getOWLObjectIntersectionOf( (body.map(_._1) ++ hs): _* ), a @@ -152,8 +152,8 @@ class LowerBound extends Approximation[RSAOntology] { for { (a, na) <- body bs = body.map(_._1).filterNot(_ equals a) - } yield LowerBound.factory.getOWLSubClassOfAxiom( - LowerBound.factory.getOWLObjectIntersectionOf( + } yield Lowerbound.factory.getOWLSubClassOfAxiom( + Lowerbound.factory.getOWLObjectIntersectionOf( (bs ++ head.map(_._2)): _* ), na -- cgit v1.2.3 From 9256241855a2b0eb21c5af01cf5ca0e3b25524d3 Mon Sep 17 
00:00:00 2001 From: Federico Igne Date: Thu, 29 Jul 2021 11:55:10 +0100 Subject: Implement fine-grained constant skolemization in upperbound --- .../ox/cs/rsacomb/approximation/Upperbound.scala | 228 ++++++++++++--------- 1 file changed, 128 insertions(+), 100 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala index ba26113..65cdee1 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala @@ -5,18 +5,18 @@ package uk.ac.ox.cs.rsacomb.approximation import org.semanticweb.owlapi.apibinding.OWLManager import org.semanticweb.owlapi.model.{IRI => _, _} -// import tech.oxfordsemantic.jrdfox.logic.expression.{Resource, IRI} +import tech.oxfordsemantic.jrdfox.logic.expression.{Resource, IRI} -// import scala.collection.mutable.{Set, Map} -// import scalax.collection.Graph -// import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._ -// import scalax.collection.GraphTraversal._ +import scala.collection.mutable.Map +import scalax.collection.Graph +import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._ +import scalax.collection.GraphTraversal._ import uk.ac.ox.cs.rsacomb.RSAOntology -// import uk.ac.ox.cs.rsacomb.RSAUtil +import uk.ac.ox.cs.rsacomb.RSAUtil import uk.ac.ox.cs.rsacomb.ontology.Ontology -object UpperBound { +object Upperbound { private val manager = OWLManager.createOWLOntologyManager() private val factory = manager.getOWLDataFactory() @@ -38,7 +38,7 @@ object UpperBound { * * @see [[uk.ac.ox.cs.rsacomb.converter.Normalizer]] */ -class UpperBound extends Approximation[RSAOntology] { +class Upperbound extends Approximation[RSAOntology] { /** Simplify conversion between Java and Scala collections */ import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ @@ -50,52 +50,52 @@ class UpperBound extends 
Approximation[RSAOntology] { def approximate(ontology: Ontology): RSAOntology = toRSA( new Ontology( - ontology.axioms filter inALCHOIQ flatMap toConjuncts, + ontology.axioms flatMap toConjuncts, ontology.datafiles ) ) /** Discards all axioms outside ALCHOIQ */ - private def inALCHOIQ(axiom: OWLLogicalAxiom): Boolean = - axiom match { - case a: OWLSubClassOfAxiom => { - val sub = a.getSubClass.getNNF - val sup = a.getSuperClass.getNNF - (sub, sup) match { - case (sub: OWLObjectAllValuesFrom, _) => false - case (sub: OWLDataAllValuesFrom, _) => false - case (_, sup: OWLDataAllValuesFrom) => false - case (sub: OWLObjectMinCardinality, _) if sub.getCardinality >= 2 => - false - case (sub: OWLDataMinCardinality, _) if sub.getCardinality >= 2 => - false - case (_, sup: OWLObjectMinCardinality) if sup.getCardinality >= 2 => - false - case (_, sup: OWLDataMinCardinality) if sup.getCardinality >= 2 => - false - case (sub: OWLObjectMaxCardinality, _) => false - case (sub: OWLDataMaxCardinality, _) => false - case (_, sup: OWLObjectMaxCardinality) if sup.getCardinality >= 2 => - false - case (_, sup: OWLDataMaxCardinality) if sup.getCardinality >= 1 => - false - case (_, sup: OWLObjectOneOf) if sup.getIndividuals.length > 2 => - false - case (sub: OWLObjectHasSelf, _) => false - case (_, sup: OWLObjectHasSelf) => false - case _ => true - } - } - case a: OWLTransitiveObjectPropertyAxiom => false - case a: OWLReflexiveObjectPropertyAxiom => false - case a: OWLSubPropertyChainOfAxiom => false - case a: OWLAsymmetricObjectPropertyAxiom => false - case a => true - } + // private def inALCHOIQ(axiom: OWLLogicalAxiom): Boolean = + // axiom match { + // case a: OWLSubClassOfAxiom => { + // val sub = a.getSubClass.getNNF + // val sup = a.getSuperClass.getNNF + // (sub, sup) match { + // case (sub: OWLObjectAllValuesFrom, _) => false + // case (sub: OWLDataAllValuesFrom, _) => false + // case (_, sup: OWLDataAllValuesFrom) => false + // case (sub: OWLObjectMinCardinality, _) if 
sub.getCardinality >= 2 => + // false + // case (sub: OWLDataMinCardinality, _) if sub.getCardinality >= 2 => + // false + // case (_, sup: OWLObjectMinCardinality) if sup.getCardinality >= 2 => + // false + // case (_, sup: OWLDataMinCardinality) if sup.getCardinality >= 2 => + // false + // case (sub: OWLObjectMaxCardinality, _) => false + // case (sub: OWLDataMaxCardinality, _) => false + // case (_, sup: OWLObjectMaxCardinality) if sup.getCardinality >= 2 => + // false + // case (_, sup: OWLDataMaxCardinality) if sup.getCardinality >= 1 => + // false + // case (_, sup: OWLObjectOneOf) if sup.getIndividuals.length > 2 => + // false + // case (sub: OWLObjectHasSelf, _) => false + // case (_, sup: OWLObjectHasSelf) => false + // case _ => true + // } + // } + // case a: OWLTransitiveObjectPropertyAxiom => false + // case a: OWLReflexiveObjectPropertyAxiom => false + // case a: OWLSubPropertyChainOfAxiom => false + // case a: OWLAsymmetricObjectPropertyAxiom => false + // case a => true + // } /** Turn disjuncts into conjuncts * - * This is a very naive way of getting rid of disjunction preserving + * This is a very naïve way of getting rid of disjunction preserving * completeness of CQ answering. * * @todo implement a choice function that decides which disjunct to @@ -110,7 +110,7 @@ class UpperBound extends Approximation[RSAOntology] { sup match { case sup: OWLObjectUnionOf => sup.asDisjunctSet.map( - UpperBound.factory.getOWLSubClassOfAxiom(sub, _) + Upperbound.factory.getOWLSubClassOfAxiom(sub, _) ) case _ => List(axiom) } @@ -118,60 +118,88 @@ class UpperBound extends Approximation[RSAOntology] { case _ => List(axiom) } - // /** Approximate a Horn-ALCHOIQ ontology to RSA - // * - // * This is done by gathering those axioms that prevent the ontology - // * dependency graph from being tree-shaped, and removing them. - // * - // * @param ontology the set of axioms to approximate. 
- // * @return the approximated RSA ontology - // */ - // private def toRSA(ontology: Ontology): RSAOntology = { - // /* Compute the dependency graph for the ontology */ - // val (graph, nodemap) = ontology.dependencyGraph - - // /* Define node colors for the graph visit */ - // sealed trait NodeColor - // case object Unvisited extends NodeColor - // case object Visited extends NodeColor - // case object ToDelete extends NodeColor - - // /* Keep track of node colors during graph visit */ - // var color = Map.from[Resource, NodeColor]( - // graph.nodes.toOuter.map(k => (k, Unvisited)) - // ) - - // for { - // component <- graph.componentTraverser().map(_ to Graph) - // edge <- component - // .outerEdgeTraverser(component.nodes.head) - // .withKind(BreadthFirst) - // } yield { - // val source = edge._1 - // val target = edge._2 - // color(source) match { - // case Unvisited | Visited => { - // color(target) match { - // case Unvisited => - // color(source) = Visited; - // color(target) = Visited - // case Visited => - // color(source) = ToDelete - // case ToDelete => - // color(source) = Visited - // } - // } - // case ToDelete => - // } - // } + /** Approximate a Horn-ALCHOIQ ontology to RSA + * + * This is done by gathering those existential axioms that prevent + * the ontology dependency graph from being tree-shaped and constant + * skolemize them. + * + * @param ontology the set of axioms to approximate. 
+ * @return the approximated RSA ontology + */ + private def toRSA(ontology: Ontology): RSAOntology = { + /* Compute the dependency graph for the ontology */ + val (graph, nodemap) = ontology.dependencyGraph + + /* Define node colors for the graph visit */ + sealed trait NodeColor + case object Unvisited extends NodeColor + case object Visited extends NodeColor + case object ToSkolem extends NodeColor + + /* Keep track of node colors during graph visit */ + var color = Map.from[Resource, NodeColor]( + graph.nodes.toOuter.map(k => (k, Unvisited)) + ) + + for { + component <- graph.componentTraverser().map(_ to Graph) + edge <- component + .outerEdgeTraverser(component.nodes.head) + .withKind(BreadthFirst) + } yield { + val source = edge._1 + val target = edge._2 + color(source) match { + case Unvisited | Visited => { + color(target) match { + case Unvisited => + color(source) = Visited; + color(target) = Visited + case Visited => + color(source) = ToSkolem + case ToSkolem => + color(source) = Visited + } + } + case ToSkolem => {} + } + } - // val toDelete = color.collect { case (resource: IRI, ToDelete) => - // nodemap(resource.getIRI) - // }.toList + val toSkolem = color.collect { case (resource: IRI, ToSkolem) => + nodemap(resource.getIRI) + }.toList + + // Force constant skolemization by introducing a fresh individual + // (singleton class). 
+ val skolemized = toSkolem flatMap { (axiom) => + import uk.ac.ox.cs.rsacomb.implicits.RSAAxiom._ + axiom.toTriple match { + case Some((subclass, role, filler)) => { + val skolem = Upperbound.factory.getOWLNamedIndividual(s"i_${axiom.toString.hashCode}") + val fresh = RSAUtil.getFreshOWLClass() + List( + Upperbound.factory.getOWLSubClassOfAxiom( + subclass, + Upperbound.factory.getOWLObjectSomeValuesFrom(role, fresh) + ), + Upperbound.factory.getOWLSubClassOfAxiom( + fresh, + Upperbound.factory.getOWLObjectOneOf(skolem) + ), + Upperbound.factory.getOWLClassAssertionAxiom(filler, skolem) + ) + } + case None => List() + } + } - // /* Remove axioms from approximated ontology */ - // RSAOntology(ontology.axioms diff toDelete, ontology.datafiles) - // } + /* Substitute selected axioms with their "skolemized" version */ + RSAOntology( + ontology.axioms diff toSkolem concat skolemized, + ontology.datafiles + ) + } // val edges1 = Seq('A ~> 'B, 'B ~> 'C, 'C ~> 'D, 'D ~> 'H, 'H ~> // 'G, 'G ~> 'F, 'E ~> 'A, 'E ~> 'F, 'B ~> 'E, 'F ~> 'G, 'B ~> 'F, @@ -179,5 +207,5 @@ class UpperBound extends Approximation[RSAOntology] { // val edges2 = Seq('I ~> 'M, 'I ~> 'L, 'L ~> 'N, 'M ~> 'N) // val edges3 = Seq('P ~> 'O) // val graph = Graph.from(edges = edges1 ++ edges2 ++ edges3) - // + } -- cgit v1.2.3 From 71367fb626710dcdca0fa09f1902b521c966ef71 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Mon, 2 Aug 2021 09:44:50 +0100 Subject: Update normalizer to handle additional OWL axioms Also the normalizer should not distinguish ALCHOIQ axioms from non-ALCHOIQ axioms, and should try its best to return a normalised axiom. 
--- .../uk/ac/ox/cs/rsacomb/converter/Normalizer.scala | 208 ++++++++++----------- 1 file changed, 97 insertions(+), 111 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala index 285040e..b5da3cc 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala @@ -43,21 +43,7 @@ class Normalizer() { /** Simplify conversion between Java and Scala collections */ import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ - /** Statistics */ - var discarded = 0 - var shifted = 0 - - /** Normalizes a - * [[org.semanticweb.owlapi.model.OWLLogicalAxiom OWLLogicalAxiom]] - * - * @note not all possible axioms are supported. Following is a list - * of all unhandled class expressions: - * - [[org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom OWLAsymmetricObjectPropertyAxiom]] - * - [[org.semanticweb.owlapi.model.OWLDatatypeDefinitionAxiom OWLDatatypeDefinitionAxiom]] - * - [[org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom OWLDisjointDataPropertiesAxiom]] - * - [[org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom OWLDisjointObjectPropertiesAxiom]] - * - [[org.semanticweb.owlapi.model.OWLHasKeyAxiom OWLHasKeyAxiom]] - * - [[org.semanticweb.owlapi.model.SWRLRule SWRLRule]] + /** Normalizes a [[OWLLogicalAxiom]] */ def normalize(axiom: OWLLogicalAxiom): Seq[OWLLogicalAxiom] = axiom match { @@ -138,12 +124,37 @@ class Normalizer() { ) } } - /** Disjunction on the rhs is not supported directly + /** Disjunction on the rhs * - * Instead we `shift` the rule to eliminate the disjunction. + * B c A1 u ... u C u ... u An -> { X c C, B c A1 u ... u X u ... 
u An } */ - case (_, sup: OWLObjectUnionOf) => - shift(sub, sup) flatMap normalize + case (_, sup: OWLObjectUnionOf) + if sup.asDisjunctSet.exists(c => !c.isOWLClass) => { + var additional = Seq() + val disjuncts = sup.asDisjunctSet + // BUG: why test for legth if this branch gets triggered only + // when there exists a ClassExpression in the disjuncts? + if (disjuncts.length > 0) { + val acc = (Seq[OWLClassExpression](), Seq[OWLLogicalAxiom]()) + val (acc1, acc2) = disjuncts.foldLeft(acc)( + { case ((acc1, acc2), disj: OWLClass) => (acc1 :+ disj, acc2) + case ((acc1, acc2), disj) => { + val cls = RSAUtil.getFreshOWLClass() + ( + acc1 :+ cls, + acc2 :+ factory.getOWLSubClassOfAxiom(cls, disj) + ) + } + } + ) + (acc2 :+ factory.getOWLSubClassOfAxiom( + sub, + factory.getOWLObjectUnionOf(acc1: _*) + )).flatMap(normalize) + } else { + normalize(factory.getOWLSubClassOfAxiom(sub, factory.getOWLNothing)) + } + } /** Complex class expression on existential restriction on the lhs * * exists R . C c D -> { C c X, exists R . X c D } @@ -174,9 +185,35 @@ class Normalizer() { ) ).flatMap(normalize) } - /** Object/Data universal quantification on the lhs not supported */ - case (sub: OWLObjectAllValuesFrom, _) => notInHornALCHOIQ(a) - case (sub: OWLDataAllValuesFrom, _) => notInHornALCHOIQ(a) + /** Object universal quantification on the lhs + * + * forall R . B c A + * ¬ A c ¬∀forall R . B + * ¬ A c exists R . ¬ B + * ¬ A c C, C c R . ¬ B + * top c A u C, D c ¬ B, C c exists R . D + * top c A u C, D n B c bot, C c exists R . 
D + */ + case (sub: OWLObjectAllValuesFrom, _) => { + val role = sub.getProperty + val filler = sub.getFiller + val (c, d) = (RSAUtil.getFreshOWLClass, RSAUtil.getFreshOWLClass) + Seq( + factory.getOWLSubClassOfAxiom( + factory.getOWLThing, + factory.getOWLObjectUnionOf(sup, c) + ), + factory.getOWLSubClassOfAxiom( + factory.getOWLObjectIntersectionOf(d, filler), + factory.getOWLNothing + ), + factory.getOWLSubClassOfAxiom( + c, factory.getOWLObjectSomeValuesFrom(role, d) + ) + ) + } + /** Object/Data universal quantification on the lhs */ + case (sub: OWLDataAllValuesFrom, _) => notSupported(a) /** Object universal quantification on the rhs * * C c forall R . D -> exists R- . C c D @@ -193,7 +230,7 @@ class Normalizer() { ) ) /** Object universal quantification on the rhs not supported */ - case (_, sup: OWLDataAllValuesFrom) => notInHornALCHOIQ(a) + case (_, sup: OWLDataAllValuesFrom) => notSupported(a) /** Exact object/data cardinality restriction on the lhs/rhs * * = i R . C -> <= i R . C n >= i R . 
X @@ -237,7 +274,7 @@ class Normalizer() { sup ) ) - case _ => notInHornALCHOIQ(a) + case _ => notSupported(a) } case (sub: OWLDataMinCardinality, _) => sub.getCardinality match { @@ -255,7 +292,7 @@ class Normalizer() { sup ) ) - case _ => notInHornALCHOIQ(a) + case _ => notSupported(a) } case (_, sup: OWLObjectMinCardinality) => sup.getCardinality match { @@ -270,7 +307,7 @@ class Normalizer() { ) ) ) - case _ => notInHornALCHOIQ(a) + case _ => notSupported(a) } case (_, sup: OWLDataMinCardinality) => sup.getCardinality match { @@ -285,11 +322,11 @@ class Normalizer() { ) ) ) - case _ => notInHornALCHOIQ(a) + case _ => notSupported(a) } /** Max object/data cardinality restriction on the lhs not supported */ - case (sub: OWLObjectMaxCardinality, _) => notInHornALCHOIQ(a) - case (sub: OWLDataMaxCardinality, _) => notInHornALCHOIQ(a) + case (sub: OWLObjectMaxCardinality, _) => notSupported(a) + case (sub: OWLDataMaxCardinality, _) => notSupported(a) /** Max object/data cardinality restriction on the rhs * * C c <= 0 R . D -> C n exists R . D -> bot @@ -320,7 +357,7 @@ class Normalizer() { ).flatMap(normalize) } case (_, sup: OWLObjectMaxCardinality) if sup.getCardinality >= 2 => - notInHornALCHOIQ(a) + notSupported(a) case (_, sup: OWLDataMaxCardinality) if sup.getCardinality == 0 => normalize( factory.getOWLSubClassOfAxiom( @@ -333,7 +370,7 @@ class Normalizer() { ) ) case (_, sup: OWLDataMaxCardinality) if sup.getCardinality >= 1 => - notInHornALCHOIQ(a) + notSupported(a) /** HasValue expression on the lhs/rhs * * HasValue(R, a) -> exists R . {a} @@ -385,21 +422,27 @@ class Normalizer() { case (sub: OWLObjectOneOf, _) => sub.getIndividuals.map(factory.getOWLClassAssertionAxiom(sup, _)) /** Enumeration of individuals on the rhs - * It's supported only when of cardinality < 2. + * + * A c {a1, ... ,an} -> { A c {a1} u ... 
u {an} } */ - case (_, sup: OWLObjectOneOf) if sup.getIndividuals.length == 0 => - normalize(factory.getOWLSubClassOfAxiom(sub, factory.getOWLNothing)) case (_, sup: OWLObjectOneOf) if sup.getIndividuals.length > 2 => - notInHornALCHOIQ(a) + normalize( + factory.getOWLSubClassOfAxiom( + sub, + factory.getOWLObjectUnionOf( + sup.getIndividuals.map(factory.getOWLObjectOneOf(_)) + ) + ) + ) /** Class complement on the lhs * - * ~C c D -> top c C n D + * ~C c D -> top c C u D */ case (sub: OWLObjectComplementOf, _) => normalize( factory.getOWLSubClassOfAxiom( factory.getOWLThing, - factory.getOWLObjectIntersectionOf(sub.getComplementNNF, sup) + factory.getOWLObjectUnionOf(sub.getComplementNNF, sup) ) ) /** Class complement on the rhs @@ -414,8 +457,8 @@ class Normalizer() { ) ) /** Self-restriction over an object property */ - case (sub: OWLObjectHasSelf, _) => notInHornALCHOIQ(a) - case (_, sup: OWLObjectHasSelf) => notInHornALCHOIQ(a) + case (sub: OWLObjectHasSelf, _) => notSupported(a) + case (_, sup: OWLObjectHasSelf) => notSupported(a) /** Axiom is already normalized */ case _ => Seq(a) @@ -513,17 +556,17 @@ class Normalizer() { case a: OWLNegativeDataPropertyAssertionAxiom => normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) - /** Not in Horn-ALCHOIQ */ - - case a: OWLTransitiveObjectPropertyAxiom => notInHornALCHOIQ(a) - - case a: OWLReflexiveObjectPropertyAxiom => notInHornALCHOIQ(a) - - case a: OWLSubPropertyChainOfAxiom => notInHornALCHOIQ(a) + case a: OWLTransitiveObjectPropertyAxiom => { + val role = a.getProperty + normalize( + factory.getOWLSubPropertyChainOfAxiom( List(role, role), role) + ) + } - /** Unsupported */ + case a: OWLReflexiveObjectPropertyAxiom => + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) - case a: OWLAsymmetricObjectPropertyAxiom => notInHornALCHOIQ(a) + case a: OWLAsymmetricObjectPropertyAxiom => notSupported(a) case a: OWLDatatypeDefinitionAxiom => notSupported(a) @@ -536,74 +579,17 @@ class Normalizer() { 
case a: SWRLRule => notSupported(a) /** Axiom is already normalized */ + //case a: OWLSubPropertyChainOfAxiom => notSupported(a) case a => Seq(a) } - /** Shift an axiom with disjunction on the rhs */ - private def shift( - sub: OWLClassExpression, - sup: OWLObjectUnionOf - ): Seq[OWLLogicalAxiom] = { - val body = - sub.asConjunctSet.map((atom) => (atom, RSAUtil.getFreshOWLClass())) - val head = - sup.asDisjunctSet.map((atom) => (atom, RSAUtil.getFreshOWLClass())) - - /* Update statistics */ - shifted += 1 - - val r1 = - factory.getOWLSubClassOfAxiom( - factory.getOWLObjectIntersectionOf( - (body.map(_._1) ++ head.map(_._2)): _* - ), - factory.getOWLNothing - ) - - val r2s = - for { - (a, na) <- head - hs = head.map(_._2).filterNot(_ equals na) - } yield factory.getOWLSubClassOfAxiom( - factory.getOWLObjectIntersectionOf( - (body.map(_._1) ++ hs): _* - ), - a - ) - - val r3s = - for { - (a, na) <- body - bs = body.map(_._1).filterNot(_ equals a) - } yield factory.getOWLSubClassOfAxiom( - factory.getOWLObjectIntersectionOf( - (bs ++ head.map(_._2)): _* - ), - na - ) - - Seq(r1) ++ r2s ++ r3s - } - - /** Approximation function for axioms out of Horn-ALCHOIQ - * - * By default discards the axiom, which guarantees a lower bound - * ontology w.r.t. CQ answering. - */ - protected def notInHornALCHOIQ( - axiom: OWLLogicalAxiom - ): Seq[OWLLogicalAxiom] = { - /* Update statistics */ - discarded += 1 - Logger.print( - s"'$axiom' has been ignored because it is not in Horn-ALCHOIQ", - Logger.VERBOSE - ) - Seq() - } - /** Non supported axioms */ private def notSupported(axiom: OWLLogicalAxiom): Seq[OWLLogicalAxiom] = + // Logger.print( + // s"'$axiom' has been ignored because it is not in Horn-ALCHOIQ", + // Logger.VERBOSE + // ) + // Seq() throw new RuntimeException( s"'$axiom' is not currently supported." 
) -- cgit v1.2.3 From 19c22a38ccaf1685e345a18883ffbac891f97df3 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Tue, 3 Aug 2021 12:20:34 +0100 Subject: Extend converter to RDFox rules to handle property chain axioms The converter to RDFox datalog rules shouldn't worry about which axiom is supported or not (it should be a job of the approximation algorithm or of a previous step). Now the converter tries its best to convert anything that makes sense in datalog. Also upperbound is ready for testing. --- .../ox/cs/rsacomb/approximation/Upperbound.scala | 38 ------------- .../ox/cs/rsacomb/converter/RDFoxConverter.scala | 64 ++++++++++++---------- 2 files changed, 36 insertions(+), 66 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala index 65cdee1..ad924aa 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala @@ -55,44 +55,6 @@ class Upperbound extends Approximation[RSAOntology] { ) ) - /** Discards all axioms outside ALCHOIQ */ - // private def inALCHOIQ(axiom: OWLLogicalAxiom): Boolean = - // axiom match { - // case a: OWLSubClassOfAxiom => { - // val sub = a.getSubClass.getNNF - // val sup = a.getSuperClass.getNNF - // (sub, sup) match { - // case (sub: OWLObjectAllValuesFrom, _) => false - // case (sub: OWLDataAllValuesFrom, _) => false - // case (_, sup: OWLDataAllValuesFrom) => false - // case (sub: OWLObjectMinCardinality, _) if sub.getCardinality >= 2 => - // false - // case (sub: OWLDataMinCardinality, _) if sub.getCardinality >= 2 => - // false - // case (_, sup: OWLObjectMinCardinality) if sup.getCardinality >= 2 => - // false - // case (_, sup: OWLDataMinCardinality) if sup.getCardinality >= 2 => - // false - // case (sub: OWLObjectMaxCardinality, _) => false - // case (sub: OWLDataMaxCardinality, _) => false - // case (_, 
sup: OWLObjectMaxCardinality) if sup.getCardinality >= 2 => - // false - // case (_, sup: OWLDataMaxCardinality) if sup.getCardinality >= 1 => - // false - // case (_, sup: OWLObjectOneOf) if sup.getIndividuals.length > 2 => - // false - // case (sub: OWLObjectHasSelf, _) => false - // case (_, sup: OWLObjectHasSelf) => false - // case _ => true - // } - // } - // case a: OWLTransitiveObjectPropertyAxiom => false - // case a: OWLReflexiveObjectPropertyAxiom => false - // case a: OWLSubPropertyChainOfAxiom => false - // case a: OWLAsymmetricObjectPropertyAxiom => false - // case a => true - // } - /** Turn disjuncts into conjuncts * * This is a very naïve way of getting rid of disjunction preserving diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala index 3aa3c5f..03c1246 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala @@ -91,35 +91,37 @@ trait RDFoxConverter { protected def ResultF(atoms: List[TupleTableAtom]): Result = (atoms, List()) protected def ResultR(rules: List[Rule]): Result = (List(), rules) - /** Converts a - * [[org.semanticweb.owlapi.model.OWLLogicalAxiom OWLLogicalAxiom]] - * into a collection of - * [[tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableAtom TupleTableAtoms]] - * and - * [[tech.oxfordsemantic.jrdfox.logic.datalog.Rule Rules]]. + /** Converts a [[OWLLogicalAxiom]] into a collection of [[TupleTableAtoms]] and [[Rules]]. * * @note not all possible axioms are handled correctly, and in * general they are assumed to be normalised. 
Following is a list of * all unhandled class expressions: * - [[org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom OWLAsymmetricObjectPropertyAxiom]] - * - [[org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom OWLDataPropertyAssertionAxiom]] - * - [[org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom OWLDataPropertyRangeAxiom]] * - [[org.semanticweb.owlapi.model.OWLDatatypeDefinitionAxiom OWLDatatypeDefinitionAxiom]] - * - [[org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom OWLDifferentIndividualsAxiom]] * - [[org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom OWLDisjointDataPropertiesAxiom]] * - [[org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom OWLDisjointObjectPropertiesAxiom]] - * - [[org.semanticweb.owlapi.model.OWLDisjointUnionAxiom OWLDisjointUnionAxiom]] - * - [[org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom OWLEquivalentDataPropertiesAxiom]] - * - [[org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom OWLFunctionalDataPropertyAxiom]] * - [[org.semanticweb.owlapi.model.OWLHasKeyAxiom OWLHasKeyAxiom]] - * - [[org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom OWLIrreflexiveObjectPropertyAxiom]] - * - [[org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom OWLNegativeDataPropertyAssertionAxiom]] - * - [[org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom OWLNegativeObjectPropertyAssertionAxiom]] + * - [[org.semanticweb.owlapi.model.SWRLRule SWRLRule]] + * + * @note The following axioms are not handled directly but can be + * normalised beforehand. 
+ * + * - [[org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom OWLTransitiveObjectPropertyAxiom]] + * - [[org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom OWLDataPropertyAssertionAxiom]] + * - [[org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom OWLDataPropertyRangeAxiom]] + * - [[org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom OWLDifferentIndividualsAxiom]] * - [[org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom OWLReflexiveObjectPropertyAxiom]] * - [[org.semanticweb.owlapi.model.OWLSameIndividualAxiom OWLSameIndividualAxiom]] - * - [[org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom OWLSubPropertyChainOfAxiom]] - * - [[org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom OWLTransitiveObjectPropertyAxiom]] - * - [[org.semanticweb.owlapi.model.SWRLRule SWRLRule]] + * - [[org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom OWLNegativeDataPropertyAssertionAxiom]] + * - [[org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom OWLNegativeObjectPropertyAssertionAxiom]] + * - [[org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom OWLIrreflexiveObjectPropertyAxiom]] + * - [[org.semanticweb.owlapi.model.OWLDisjointUnionAxiom OWLDisjointUnionAxiom]] + * - [[org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom OWLEquivalentDataPropertiesAxiom]] + * - [[org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom OWLFunctionalDataPropertyAxiom]] + * + * @see [[Normaliser]] + * @see + * http://owlcs.github.io/owlapi/apidocs_5/index.html */ def convert( axiom: OWLLogicalAxiom, @@ -250,20 +252,26 @@ trait RDFoxConverter { ResultF(List(prop)) } - case a: OWLDataPropertyRangeAxiom => - Result() // ignored - - case a: OWLFunctionalDataPropertyAxiom => - Result() - - case a: OWLTransitiveObjectPropertyAxiom => - Result() + case a: OWLSubPropertyChainOfAxiom => { + val (term1, body) = a.getPropertyChain.foldLeft((term, List[TupleTableAtom]())){ + case ((term, atoms), prop) => { 
+ val term1 = RSAUtil.genFreshVariable() + val atom = convert(prop, term, term1, suffix) + (term1, atoms :+ atom) + } + } + val head = convert(a.getSuperProperty, term, term1, suffix) + ResultR(List(Rule.create(head, body))) + } /** Catch-all case for all unhandled axiom types. */ - case a => default(axiom) + case a => unsupported(axiom) } - protected def default(axiom: OWLLogicalAxiom): Result = + protected def toBeNormalised(axiom: OWLLogicalAxiom): Result = + throw new RuntimeException(s"Axiom '$axiom' should be normalised!") + + protected def unsupported(axiom: OWLLogicalAxiom): Result = throw new RuntimeException(s"Axiom '$axiom' is not supported (yet?)") /** Converts a class expression into a collection of atoms. -- cgit v1.2.3 From 75eb39dd0fd31c295b9ed9a6d3b0fd3b25611b2a Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Fri, 6 Aug 2021 13:31:54 +0100 Subject: Add new fresh data factory This will help write more significant test. --- .../scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala | 12 +- src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala | 4 +- .../scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 24 ---- .../ox/cs/rsacomb/approximation/Lowerbound.scala | 15 +-- .../ox/cs/rsacomb/approximation/Upperbound.scala | 17 ++- .../uk/ac/ox/cs/rsacomb/converter/Normalizer.scala | 149 ++++++++++++--------- .../ox/cs/rsacomb/converter/RDFoxConverter.scala | 111 +++++++-------- .../uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala | 9 +- .../uk/ac/ox/cs/rsacomb/ontology/Ontology.scala | 18 ++- src/main/scala/uk/ac/ox/cs/rsacomb/package.scala | 21 +++ .../uk/ac/ox/cs/rsacomb/util/DataFactory.scala | 29 ++++ 11 files changed, 227 insertions(+), 182 deletions(-) create mode 100644 src/main/scala/uk/ac/ox/cs/rsacomb/package.scala create mode 100644 src/main/scala/uk/ac/ox/cs/rsacomb/util/DataFactory.scala (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala index ca54054..3467d3c 
100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala @@ -39,7 +39,7 @@ import implicits.JavaCollections._ import uk.ac.ox.cs.rsacomb.converter._ import uk.ac.ox.cs.rsacomb.suffix._ -import uk.ac.ox.cs.rsacomb.util.RSA +import uk.ac.ox.cs.rsacomb.util.{DataFactory, RSA} /** Canonical model generator * @@ -92,7 +92,7 @@ class CanonicalModel(val ontology: RSAOntology) { val (facts, rules): (List[TupleTableAtom], List[Rule]) = { // Compute rules from ontology axioms val (facts, rules) = { - val term = RSAUtil.genFreshVariable() + val term = Variable.create("X") val unsafe = ontology.unsafe ontology.axioms .map(a => @@ -216,13 +216,13 @@ class CanonicalModel(val ontology: RSAOntology) { unsafe: List[OWLObjectPropertyExpression], skolem: SkolemStrategy, suffix: RSASuffix - ): Result = + )(implicit fresh: DataFactory): Result = axiom match { case a: OWLSubClassOfAxiom if a.isT5 => { val role = axiom.objectPropertyExpressionsInSignature(0) if (unsafe contains role) - super.convert(a, term, unsafe, new Standard(a), Forward) + super.convert(a, term, unsafe, new Standard(a), Forward)(fresh) else { val (f1, r1) = rules1(a) (f1, r1 ::: rules2(a) ::: rules3(a)) @@ -231,12 +231,12 @@ class CanonicalModel(val ontology: RSAOntology) { case a: OWLSubObjectPropertyOfAxiom => { val (facts, rules) = List(Empty, Forward, Backward) - .map(super.convert(a, term, unsafe, NoSkolem, _)) + .map(super.convert(a, term, unsafe, NoSkolem, _)(fresh)) .unzip (facts.flatten, rules.flatten) } - case a => super.convert(a, term, unsafe, skolem, suffix) + case a => super.convert(a, term, unsafe, skolem, suffix)(fresh) } } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala index 4c63e17..a1fd20f 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala @@ -28,7 +28,7 @@ import sparql.ConjunctiveQuery import 
uk.ac.ox.cs.rsacomb.ontology.Ontology import uk.ac.ox.cs.rsacomb.converter.Normalizer -import uk.ac.ox.cs.rsacomb.approximation.Lowerbound +import uk.ac.ox.cs.rsacomb.approximation.{Upperbound, Lowerbound} case class RSAOption[+T](opt: T) { def get[T]: T = opt.asInstanceOf[T] @@ -133,7 +133,7 @@ object RSAComb extends App { ).normalize(new Normalizer) /* Approximate the ontology to RSA */ - val toRSA = new Lowerbound + val toRSA = new Upperbound val rsa = ontology approximate toRSA if (config contains 'query) { diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index 30e1305..869dd88 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -81,30 +81,6 @@ import uk.ac.ox.cs.rsacomb.util.{RDFoxUtil, RSA} import uk.ac.ox.cs.rsacomb.util.Logger import uk.ac.ox.cs.rsacomb.ontology.Ontology -object RSAUtil { - - // implicit def axiomsToOntology(axioms: Seq[OWLAxiom]) = { - // val manager = OWLManager.createOWLOntologyManager() - // manager.createOntology(axioms.asJava) - // } - - /** Manager instance to interface with OWLAPI */ - val manager = OWLManager.createOWLOntologyManager() - val factory = manager.getOWLDataFactory() - - /** Simple fresh variable/class generator */ - private var counter = -1; - def genFreshVariable(): Variable = { - counter += 1 - Variable.create(f"I$counter%05d") - } - def getFreshOWLClass(): OWLClass = { - counter += 1 - factory.getOWLClass(s"X$counter") - } - -} - object RSAOntology { import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala index 290cbaf..88732d5 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala @@ -13,8 +13,8 @@ import 
scalax.collection.GraphPredef._, scalax.collection.GraphEdge._ import scalax.collection.GraphTraversal._ import uk.ac.ox.cs.rsacomb.RSAOntology -import uk.ac.ox.cs.rsacomb.RSAUtil import uk.ac.ox.cs.rsacomb.ontology.Ontology +import uk.ac.ox.cs.rsacomb.util.DataFactory object Lowerbound { @@ -38,7 +38,8 @@ object Lowerbound { * * @see [[uk.ac.ox.cs.rsacomb.converter.Normalizer]] */ -class Lowerbound extends Approximation[RSAOntology] { +class Lowerbound(implicit fresh: DataFactory) + extends Approximation[RSAOntology] { /** Simplify conversion between Java and Scala collections */ import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ @@ -122,12 +123,10 @@ class Lowerbound extends Approximation[RSAOntology] { val sup = a.getSuperClass.getNNF sup match { case sup: OWLObjectUnionOf => { - val body = sub.asConjunctSet.map((atom) => - (atom, RSAUtil.getFreshOWLClass()) - ) - val head = sup.asDisjunctSet.map((atom) => - (atom, RSAUtil.getFreshOWLClass()) - ) + val body = + sub.asConjunctSet.map((atom) => (atom, fresh.getOWLClass)) + val head = + sup.asDisjunctSet.map((atom) => (atom, fresh.getOWLClass)) val r1 = Lowerbound.factory.getOWLSubClassOfAxiom( diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala index ad924aa..1ae7941 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala @@ -13,8 +13,8 @@ import scalax.collection.GraphPredef._, scalax.collection.GraphEdge._ import scalax.collection.GraphTraversal._ import uk.ac.ox.cs.rsacomb.RSAOntology -import uk.ac.ox.cs.rsacomb.RSAUtil import uk.ac.ox.cs.rsacomb.ontology.Ontology +import uk.ac.ox.cs.rsacomb.util.DataFactory object Upperbound { @@ -38,7 +38,8 @@ object Upperbound { * * @see [[uk.ac.ox.cs.rsacomb.converter.Normalizer]] */ -class Upperbound extends Approximation[RSAOntology] { +class Upperbound(implicit fresh: DataFactory) 
+ extends Approximation[RSAOntology] { /** Simplify conversion between Java and Scala collections */ import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ @@ -138,15 +139,17 @@ class Upperbound extends Approximation[RSAOntology] { import uk.ac.ox.cs.rsacomb.implicits.RSAAxiom._ axiom.toTriple match { case Some((subclass, role, filler)) => { - val skolem = Upperbound.factory.getOWLNamedIndividual(s"i_${axiom.toString.hashCode}") - val fresh = RSAUtil.getFreshOWLClass() + val skolem = Upperbound.factory.getOWLNamedIndividual( + s"i_${axiom.toString.hashCode}" + ) + val cls = fresh.getOWLClass List( Upperbound.factory.getOWLSubClassOfAxiom( subclass, - Upperbound.factory.getOWLObjectSomeValuesFrom(role, fresh) + Upperbound.factory.getOWLObjectSomeValuesFrom(role, cls) ), Upperbound.factory.getOWLSubClassOfAxiom( - fresh, + cls, Upperbound.factory.getOWLObjectOneOf(skolem) ), Upperbound.factory.getOWLClassAssertionAxiom(filler, skolem) @@ -169,5 +172,5 @@ class Upperbound extends Approximation[RSAOntology] { // val edges2 = Seq('I ~> 'M, 'I ~> 'L, 'L ~> 'N, 'M ~> 'N) // val edges3 = Seq('P ~> 'O) // val graph = Graph.from(edges = edges1 ++ edges2 ++ edges3) - + } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala index b5da3cc..c24f99d 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala @@ -19,9 +19,8 @@ package uk.ac.ox.cs.rsacomb.converter import org.semanticweb.owlapi.apibinding.OWLManager import org.semanticweb.owlapi.model._ -import uk.ac.ox.cs.rsacomb.util.Logger +import uk.ac.ox.cs.rsacomb.util.{Logger, DataFactory} import uk.ac.ox.cs.rsacomb.RSAOntology -import uk.ac.ox.cs.rsacomb.RSAUtil object Normalizer { @@ -45,7 +44,9 @@ class Normalizer() { /** Normalizes a [[OWLLogicalAxiom]] */ - def normalize(axiom: OWLLogicalAxiom): Seq[OWLLogicalAxiom] = + def normalize( + axiom: 
OWLLogicalAxiom + )(implicit fresh: DataFactory): Seq[OWLLogicalAxiom] = axiom match { case a: OWLSubClassOfAxiom => { val sub = a.getSubClass.getNNF @@ -56,11 +57,11 @@ class Normalizer() { * C c D -> { C c X, X c D } */ case _ if !sub.isOWLClass && !sup.isOWLClass => { - val cls = RSAUtil.getFreshOWLClass() + val cls = fresh.getOWLClass Seq( factory.getOWLSubClassOfAxiom(sub, cls), factory.getOWLSubClassOfAxiom(cls, sup) - ).flatMap(normalize) + ).flatMap(normalize(_)(fresh)) } /** Conjunction on the lhs * @@ -77,7 +78,7 @@ class Normalizer() { if (conj.isOWLClass) (acc1 :+ conj, acc2) else { - val cls = RSAUtil.getFreshOWLClass() + val cls = fresh.getOWLClass ( acc1 :+ cls, acc2 :+ factory.getOWLSubClassOfAxiom(conj, cls) @@ -89,9 +90,11 @@ class Normalizer() { factory.getOWLObjectIntersectionOf(acc1: _*), sup )) - .flatMap(normalize) + .flatMap(normalize(_)(fresh)) } else { - normalize(factory.getOWLSubClassOfAxiom(factory.getOWLThing, sup)) + normalize( + factory.getOWLSubClassOfAxiom(factory.getOWLThing, sup) + )(fresh) } } /** Conjunction on the rhs @@ -103,9 +106,11 @@ class Normalizer() { if (conjuncts.length > 0) { conjuncts .map(cls => factory.getOWLSubClassOfAxiom(sub, cls)) - .flatMap(normalize) + .flatMap(normalize(_)(fresh)) } else { - normalize(factory.getOWLSubClassOfAxiom(sub, factory.getOWLThing)) + normalize( + factory.getOWLSubClassOfAxiom(sub, factory.getOWLThing) + )(fresh) } } /** Disjunction on the lhs @@ -117,11 +122,11 @@ class Normalizer() { if (disjuncts.length > 0) { disjuncts .map(cls => factory.getOWLSubClassOfAxiom(cls, sup)) - .flatMap(normalize) + .flatMap(normalize(_)(fresh)) } else { normalize( factory.getOWLSubClassOfAxiom(factory.getOWLNothing, sup) - ) + )(fresh) } } /** Disjunction on the rhs @@ -137,9 +142,10 @@ class Normalizer() { if (disjuncts.length > 0) { val acc = (Seq[OWLClassExpression](), Seq[OWLLogicalAxiom]()) val (acc1, acc2) = disjuncts.foldLeft(acc)( - { case ((acc1, acc2), disj: OWLClass) => (acc1 :+ disj, 
acc2) + { + case ((acc1, acc2), disj: OWLClass) => (acc1 :+ disj, acc2) case ((acc1, acc2), disj) => { - val cls = RSAUtil.getFreshOWLClass() + val cls = fresh.getOWLClass ( acc1 :+ cls, acc2 :+ factory.getOWLSubClassOfAxiom(cls, disj) @@ -150,9 +156,11 @@ class Normalizer() { (acc2 :+ factory.getOWLSubClassOfAxiom( sub, factory.getOWLObjectUnionOf(acc1: _*) - )).flatMap(normalize) + )).flatMap(normalize(_)(fresh)) } else { - normalize(factory.getOWLSubClassOfAxiom(sub, factory.getOWLNothing)) + normalize( + factory.getOWLSubClassOfAxiom(sub, factory.getOWLNothing) + )(fresh) } } /** Complex class expression on existential restriction on the lhs @@ -161,14 +169,14 @@ class Normalizer() { */ case (sub: OWLObjectSomeValuesFrom, _) if !sub.getFiller.isOWLClass => { - val cls = RSAUtil.getFreshOWLClass() + val cls = fresh.getOWLClass Seq( factory.getOWLSubClassOfAxiom(sub.getFiller, cls), factory.getOWLSubClassOfAxiom( factory.getOWLObjectSomeValuesFrom(sub.getProperty, cls), sup ) - ).flatMap(normalize) + ).flatMap(normalize(_)(fresh)) } /** Complex class expression on existential restriction on the rhs * @@ -176,14 +184,14 @@ class Normalizer() { */ case (_, sup: OWLObjectSomeValuesFrom) if !sup.getFiller.isOWLClass => { - val cls = RSAUtil.getFreshOWLClass() + val cls = fresh.getOWLClass Seq( factory.getOWLSubClassOfAxiom(cls, sup.getFiller), factory.getOWLSubClassOfAxiom( sub, factory.getOWLObjectSomeValuesFrom(sup.getProperty, cls) ) - ).flatMap(normalize) + ).flatMap(normalize(_)(fresh)) } /** Object universal quantification on the lhs * @@ -197,7 +205,7 @@ class Normalizer() { case (sub: OWLObjectAllValuesFrom, _) => { val role = sub.getProperty val filler = sub.getFiller - val (c, d) = (RSAUtil.getFreshOWLClass, RSAUtil.getFreshOWLClass) + val (c, d) = (fresh.getOWLClass, fresh.getOWLClass) Seq( factory.getOWLSubClassOfAxiom( factory.getOWLThing, @@ -208,12 +216,13 @@ class Normalizer() { factory.getOWLNothing ), factory.getOWLSubClassOfAxiom( - c, 
factory.getOWLObjectSomeValuesFrom(role, d) + c, + factory.getOWLObjectSomeValuesFrom(role, d) ) ) } /** Object/Data universal quantification on the lhs */ - case (sub: OWLDataAllValuesFrom, _) => notSupported(a) + case (sub: OWLDataAllValuesFrom, _) => notSupported(a) /** Object universal quantification on the rhs * * C c forall R . D -> exists R- . C c D @@ -228,7 +237,7 @@ class Normalizer() { ), sup.getFiller ) - ) + )(fresh) /** Object universal quantification on the rhs not supported */ case (_, sup: OWLDataAllValuesFrom) => notSupported(a) /** Exact object/data cardinality restriction on the lhs/rhs @@ -238,19 +247,19 @@ class Normalizer() { case (sub: OWLObjectExactCardinality, _) => normalize( factory.getOWLSubClassOfAxiom(sub.asIntersectionOfMinMax, sup) - ) + )(fresh) case (sub: OWLDataExactCardinality, _) => normalize( factory.getOWLSubClassOfAxiom(sub.asIntersectionOfMinMax, sup) - ) + )(fresh) case (_, sup: OWLObjectExactCardinality) => normalize( factory.getOWLSubClassOfAxiom(sub, sup.asIntersectionOfMinMax) - ) + )(fresh) case (_, sup: OWLDataExactCardinality) => normalize( factory.getOWLSubClassOfAxiom(sub, sup.asIntersectionOfMinMax) - ) + )(fresh) /** Min object/data cardinality restriction on the lhs/rhs * * >= 0 R . 
C -> top @@ -263,7 +272,7 @@ class Normalizer() { case 0 => normalize( factory.getOWLSubClassOfAxiom(factory.getOWLThing, sup) - ) + )(fresh) case 1 => normalize( factory.getOWLSubClassOfAxiom( @@ -273,7 +282,7 @@ class Normalizer() { ), sup ) - ) + )(fresh) case _ => notSupported(a) } case (sub: OWLDataMinCardinality, _) => @@ -281,7 +290,7 @@ class Normalizer() { case 0 => normalize( factory.getOWLSubClassOfAxiom(factory.getOWLThing, sup) - ) + )(fresh) case 1 => normalize( factory.getOWLSubClassOfAxiom( @@ -291,7 +300,7 @@ class Normalizer() { ), sup ) - ) + )(fresh) case _ => notSupported(a) } case (_, sup: OWLObjectMinCardinality) => @@ -306,7 +315,7 @@ class Normalizer() { sup.getFiller ) ) - ) + )(fresh) case _ => notSupported(a) } case (_, sup: OWLDataMinCardinality) => @@ -321,7 +330,7 @@ class Normalizer() { sup.getFiller ) ) - ) + )(fresh) case _ => notSupported(a) } /** Max object/data cardinality restriction on the lhs not supported */ @@ -344,17 +353,17 @@ class Normalizer() { ), factory.getOWLNothing ) - ) + )(fresh) case (_, sup: OWLObjectMaxCardinality) if sup.getCardinality == 1 && !sup.getFiller.isOWLClass => { - val cls = RSAUtil.getFreshOWLClass() + val cls = fresh.getOWLClass Seq( factory.getOWLSubClassOfAxiom(cls, sup.getFiller), factory.getOWLSubClassOfAxiom( sub, factory.getOWLObjectMaxCardinality(1, sup.getProperty, cls) ) - ).flatMap(normalize) + ).flatMap(normalize(_)(fresh)) } case (_, sup: OWLObjectMaxCardinality) if sup.getCardinality >= 2 => notSupported(a) @@ -368,7 +377,7 @@ class Normalizer() { ), factory.getOWLNothing ) - ) + )(fresh) case (_, sup: OWLDataMaxCardinality) if sup.getCardinality >= 1 => notSupported(a) /** HasValue expression on the lhs/rhs @@ -384,7 +393,7 @@ class Normalizer() { ), sup ) - ) + )(fresh) case (sub: OWLDataHasValue, _) => normalize( factory.getOWLSubClassOfAxiom( @@ -394,7 +403,7 @@ class Normalizer() { ), sup ) - ) + )(fresh) case (_, sup: OWLObjectHasValue) => normalize( 
factory.getOWLSubClassOfAxiom( @@ -404,7 +413,7 @@ class Normalizer() { factory.getOWLObjectOneOf(sup.getFiller) ) ) - ) + )(fresh) case (_, sup: OWLDataHasValue) => normalize( factory.getOWLSubClassOfAxiom( @@ -414,7 +423,7 @@ class Normalizer() { factory.getOWLDataOneOf(sup.getFiller) ) ) - ) + )(fresh) /** Enumeration of individuals on the lhs * * {a1, ... ,an} c D -> { D(a1), ..., D(an) } @@ -433,7 +442,7 @@ class Normalizer() { sup.getIndividuals.map(factory.getOWLObjectOneOf(_)) ) ) - ) + )(fresh) /** Class complement on the lhs * * ~C c D -> top c C u D @@ -444,7 +453,7 @@ class Normalizer() { factory.getOWLThing, factory.getOWLObjectUnionOf(sub.getComplementNNF, sup) ) - ) + )(fresh) /** Class complement on the rhs * * C c ~D -> C n D c bot @@ -455,7 +464,7 @@ class Normalizer() { factory.getOWLObjectIntersectionOf(sup.getComplementNNF, sub), factory.getOWLNothing ) - ) + )(fresh) /** Self-restriction over an object property */ case (sub: OWLObjectHasSelf, _) => notSupported(a) case (_, sup: OWLObjectHasSelf) => notSupported(a) @@ -466,32 +475,34 @@ class Normalizer() { } case a: OWLEquivalentClassesAxiom => { - a.getAxiomWithoutAnnotations.asOWLSubClassOfAxioms.flatMap(normalize) + a.getAxiomWithoutAnnotations.asOWLSubClassOfAxioms.flatMap( + normalize(_)(fresh) + ) } case a: OWLEquivalentObjectPropertiesAxiom => { a.getAxiomWithoutAnnotations.asSubObjectPropertyOfAxioms.flatMap( - normalize + normalize(_)(fresh) ) } case a: OWLEquivalentDataPropertiesAxiom => { a.getAxiomWithoutAnnotations.asSubDataPropertyOfAxioms.flatMap( - normalize + normalize(_)(fresh) ) } case a: OWLObjectPropertyDomainAxiom => - normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh) case a: OWLObjectPropertyRangeAxiom => - normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh) case a: OWLDataPropertyDomainAxiom => - 
normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh) case a: OWLDataPropertyRangeAxiom => - normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh) case a: OWLDisjointClassesAxiom => a.asPairwiseAxioms.map((a) => { @@ -504,20 +515,22 @@ class Normalizer() { case a: OWLInverseObjectPropertiesAxiom => a.getAxiomWithoutAnnotations.asSubObjectPropertyOfAxioms.flatMap( - normalize + normalize(_)(fresh) ) case a: OWLFunctionalObjectPropertyAxiom => - normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh) case a: OWLFunctionalDataPropertyAxiom => - normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh) case a: OWLInverseFunctionalObjectPropertyAxiom => - normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh) case a: OWLSymmetricObjectPropertyAxiom => - a.getAxiomWithoutAnnotations.asSubPropertyAxioms.flatMap(normalize) + a.getAxiomWithoutAnnotations.asSubPropertyAxioms.flatMap( + normalize(_)(fresh) + ) case a: OWLDifferentIndividualsAxiom => a.asPairwiseAxioms.map((a) => { @@ -529,42 +542,44 @@ class Normalizer() { }) case a: OWLIrreflexiveObjectPropertyAxiom => - normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh) case a: OWLSameIndividualAxiom => - a.getAxiomWithoutAnnotations.asOWLSubClassOfAxioms.flatMap(normalize) + a.getAxiomWithoutAnnotations.asOWLSubClassOfAxioms.flatMap( + normalize(_)(fresh) + ) case a: OWLDisjointUnionAxiom => Seq(a.getOWLDisjointClassesAxiom, a.getOWLEquivalentClassesAxiom) - .flatMap(normalize) + .flatMap(normalize(_)(fresh)) /** Complex class assertion * * C(a) -> { X(a), X c C } */ case 
a: OWLClassAssertionAxiom if !a.getClassExpression.isOWLClass => { - val cls = RSAUtil.getFreshOWLClass() + val cls = fresh.getOWLClass Seq( factory.getOWLClassAssertionAxiom(cls, a.getIndividual), factory.getOWLSubClassOfAxiom(cls, a.getClassExpression) - ).flatMap(normalize) + ).flatMap(normalize(_)(fresh)) } case a: OWLNegativeObjectPropertyAssertionAxiom => - normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh) case a: OWLNegativeDataPropertyAssertionAxiom => - normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh) case a: OWLTransitiveObjectPropertyAxiom => { val role = a.getProperty normalize( - factory.getOWLSubPropertyChainOfAxiom( List(role, role), role) - ) + factory.getOWLSubPropertyChainOfAxiom(List(role, role), role) + )(fresh) } case a: OWLReflexiveObjectPropertyAxiom => - normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom) + normalize(a.getAxiomWithoutAnnotations.asOWLSubClassOfAxiom)(fresh) case a: OWLAsymmetricObjectPropertyAxiom => notSupported(a) diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala index 03c1246..266c158 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala @@ -27,10 +27,9 @@ import tech.oxfordsemantic.jrdfox.logic.datalog.{ TupleTableAtom } import tech.oxfordsemantic.jrdfox.logic.expression.{Term, IRI, FunctionCall} -import uk.ac.ox.cs.rsacomb.RSAUtil import uk.ac.ox.cs.rsacomb.RSAOntology import uk.ac.ox.cs.rsacomb.suffix.{Empty, Inverse, RSASuffix} -import uk.ac.ox.cs.rsacomb.util.{RSA, RDFoxUtil} +import uk.ac.ox.cs.rsacomb.util.{DataFactory, RSA, RDFoxUtil} /** Horn-ALCHOIQ to RDFox axiom converter. 
* @@ -129,16 +128,16 @@ trait RDFoxConverter { unsafe: List[OWLObjectPropertyExpression], skolem: SkolemStrategy, suffix: RSASuffix - ): Result = + )(implicit fresh: DataFactory): Result = axiom match { case a: OWLSubClassOfAxiom => { val subcls = a.getSubClass val supcls = a.getSuperClass val (sub, _) = - convert(subcls, term, unsafe, NoSkolem, suffix) + convert(subcls, term, unsafe, NoSkolem, suffix)(fresh) val (sup, ext) = - convert(supcls, term, unsafe, skolem, suffix) + convert(supcls, term, unsafe, skolem, suffix)(fresh) val rule = Rule.create(sup, ext ::: sub) ResultR(List(rule)) } @@ -148,7 +147,7 @@ trait RDFoxConverter { case a: OWLEquivalentClassesAxiom => { val (atoms, rules) = a.asPairwiseAxioms .flatMap(_.asOWLSubClassOfAxioms) - .map(a => convert(a, term, unsafe, skolem dup a, suffix)) + .map(a => convert(a, term, unsafe, skolem dup a, suffix)(fresh)) .unzip (atoms.flatten, rules.flatten) } @@ -156,61 +155,64 @@ trait RDFoxConverter { case a: OWLEquivalentObjectPropertiesAxiom => { val (atoms, rules) = a.asPairwiseAxioms .flatMap(_.asSubObjectPropertyOfAxioms) - .map(a => convert(a, term, unsafe, skolem dup a, suffix)) + .map(a => convert(a, term, unsafe, skolem dup a, suffix)(fresh)) .unzip (atoms.flatten, rules.flatten) } case a: OWLSubObjectPropertyOfAxiom => { - val term1 = RSAUtil.genFreshVariable() - val body = convert(a.getSubProperty, term, term1, suffix) - val head = convert(a.getSuperProperty, term, term1, suffix) + val term1 = fresh.getVariable + val body = convert(a.getSubProperty, term, term1, suffix)(fresh) + val head = convert(a.getSuperProperty, term, term1, suffix)(fresh) ResultR(List(Rule.create(head, body))) } case a: OWLSubDataPropertyOfAxiom => { - val term1 = RSAUtil.genFreshVariable() - val body = convert(a.getSubProperty, term, term1, suffix) - val head = convert(a.getSuperProperty, term, term1, suffix) + val term1 = fresh.getVariable + val body = convert(a.getSubProperty, term, term1, suffix)(fresh) + val head = 
convert(a.getSuperProperty, term, term1, suffix)(fresh) ResultR(List(Rule.create(head, body))) } case a: OWLObjectPropertyDomainAxiom => - convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix) + convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix)(fresh) case a: OWLObjectPropertyRangeAxiom => { - val term1 = RSAUtil.genFreshVariable() - val (res, ext) = convert(a.getRange, term, unsafe, skolem, suffix) - val prop = convert(a.getProperty, term1, term, suffix) + val term1 = fresh.getVariable + val (res, ext) = + convert(a.getRange, term, unsafe, skolem, suffix)(fresh) + val prop = convert(a.getProperty, term1, term, suffix)(fresh) ResultR(List(Rule.create(res, prop :: ext))) } case a: OWLDataPropertyDomainAxiom => - convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix) + convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix)(fresh) case a: OWLDisjointClassesAxiom => { val body = a.getOperandsAsList.asScala.toSeq - .flatMap((cls) => convert(cls, term, unsafe, NoSkolem, suffix)._1) + .flatMap((cls) => + convert(cls, term, unsafe, NoSkolem, suffix)(fresh)._1 + ) val bottom = TupleTableAtom.rdf(term, IRI.RDF_TYPE, IRI.NOTHING) ResultR(List(Rule.create(bottom, body: _*))) } case a: OWLInverseObjectPropertiesAxiom => { val (atoms, rules) = a.asSubObjectPropertyOfAxioms - .map(a => convert(a, term, unsafe, skolem dup a, suffix)) + .map(a => convert(a, term, unsafe, skolem dup a, suffix)(fresh)) .unzip (atoms.flatten, rules.flatten) } case a: OWLFunctionalObjectPropertyAxiom => - convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix) + convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix)(fresh) case a: OWLInverseFunctionalObjectPropertyAxiom => - convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix) + convert(a.asOWLSubClassOfAxiom, term, unsafe, skolem, suffix)(fresh) case a: OWLSymmetricObjectPropertyAxiom => { val (atoms, rules) = a.asSubPropertyAxioms - .map(a => convert(a, term, unsafe, skolem dup a, suffix)) + 
.map(a => convert(a, term, unsafe, skolem dup a, suffix)(fresh)) .unzip (atoms.flatten, rules.flatten) } @@ -221,7 +223,7 @@ trait RDFoxConverter { case i: OWLNamedIndividual => { val cls = a.getClassExpression val (res, _) = - convert(cls, i.getIRI, unsafe, NoSkolem, suffix) + convert(cls, i.getIRI, unsafe, NoSkolem, suffix)(fresh) ResultF(res) } case _ => Result() @@ -234,7 +236,7 @@ trait RDFoxConverter { else { val subj = a.getSubject.asOWLNamedIndividual.getIRI val obj = a.getObject.asOWLNamedIndividual.getIRI - val prop = convert(a.getProperty, subj, obj, suffix) + val prop = convert(a.getProperty, subj, obj, suffix)(fresh) ResultF(List(prop)) } @@ -248,29 +250,29 @@ trait RDFoxConverter { else { val subj = a.getSubject.asOWLNamedIndividual.getIRI val obj = a.getObject - val prop = convert(a.getProperty, subj, obj, suffix) + val prop = convert(a.getProperty, subj, obj, suffix)(fresh) ResultF(List(prop)) } case a: OWLSubPropertyChainOfAxiom => { - val (term1, body) = a.getPropertyChain.foldLeft((term, List[TupleTableAtom]())){ - case ((term, atoms), prop) => { - val term1 = RSAUtil.genFreshVariable() - val atom = convert(prop, term, term1, suffix) - (term1, atoms :+ atom) + val (term1, body) = + a.getPropertyChain.foldLeft((term, List[TupleTableAtom]())) { + case ((term, atoms), prop) => { + val term1 = fresh.getVariable + val atom = convert(prop, term, term1, suffix)(fresh) + (term1, atoms :+ atom) + } } - } - val head = convert(a.getSuperProperty, term, term1, suffix) - ResultR(List(Rule.create(head, body))) + val head = convert(a.getSuperProperty, term, term1, suffix)(fresh) + val rule = Rule.create(head, body) + println(rule) + ResultR(List(rule)) } /** Catch-all case for all unhandled axiom types. 
*/ case a => unsupported(axiom) } - protected def toBeNormalised(axiom: OWLLogicalAxiom): Result = - throw new RuntimeException(s"Axiom '$axiom' should be normalised!") - protected def unsupported(axiom: OWLLogicalAxiom): Result = throw new RuntimeException(s"Axiom '$axiom' is not supported (yet?)") @@ -299,7 +301,7 @@ trait RDFoxConverter { unsafe: List[OWLObjectPropertyExpression], skolem: SkolemStrategy, suffix: RSASuffix - ): Shards = + )(implicit fresh: DataFactory): Shards = expr match { /** Simple class name. @@ -318,7 +320,7 @@ trait RDFoxConverter { */ case e: OWLObjectIntersectionOf => { val (res, ext) = e.asConjunctSet - .map(convert(_, term, unsafe, skolem, suffix)) + .map(convert(_, term, unsafe, skolem, suffix)(fresh)) .unzip (res.flatten, ext.flatten) } @@ -352,14 +354,14 @@ trait RDFoxConverter { case e: OWLObjectSomeValuesFrom => { val cls = e.getFiller() val role = e.getProperty() - val varX = RSAUtil.genFreshVariable + val varX = fresh.getVariable val (bind, term1) = skolem match { case NoSkolem => (None, varX) case c: Constant => (None, c.iri) case s: Standard => (Some(RDFoxUtil.skolem(s.name, term, varX)), varX) } - val (res, ext) = convert(cls, term1, unsafe, skolem, suffix) - val prop = convert(role, term, term1, suffix) + val (res, ext) = convert(cls, term1, unsafe, skolem, suffix)(fresh) + val prop = convert(role, term, term1, suffix)(fresh) (prop :: res, ext ++ bind) } @@ -379,13 +381,13 @@ trait RDFoxConverter { // Computes the result of rule skolemization. Depending on the used // technique it might involve the introduction of additional atoms, // and/or fresh constants and variables. 
- val varX = RSAUtil.genFreshVariable + val varX = fresh.getVariable val (bind, term1) = skolem match { case NoSkolem => (None, varX) case c: Constant => (None, c.iri) case s: Standard => (Some(RDFoxUtil.skolem(s.name, term, varX)), varX) } - val prop = convert(role, term, term1, suffix) + val prop = convert(role, term, term1, suffix)(fresh) (List(prop), bind.toList) } @@ -404,11 +406,12 @@ trait RDFoxConverter { s"Class expression '$e' has cardinality restriction != 1." ) val vars @ (y :: z :: _) = - Seq(RSAUtil.genFreshVariable(), RSAUtil.genFreshVariable()) + Seq(fresh.getVariable, fresh.getVariable) val cls = e.getFiller val role = e.getProperty - val (res, ext) = vars.map(convert(cls, _, unsafe, skolem, suffix)).unzip - val props = vars.map(convert(role, term, _, suffix)) + val (res, ext) = + vars.map(convert(cls, _, unsafe, skolem, suffix)(fresh)).unzip + val props = vars.map(convert(role, term, _, suffix)(fresh)) val eq = TupleTableAtom.rdf(y, IRI.SAME_AS, z) (List(eq), res.flatten ++ props) } @@ -431,7 +434,7 @@ trait RDFoxConverter { val filler = e.getFiller val property = e.getProperty val expr = factory.getOWLObjectSomeValuesFrom(property, filler) - convert(expr, term, unsafe, skolem, suffix) + convert(expr, term, unsafe, skolem, suffix)(fresh) } /** Minimum cardinality restriction class @@ -452,7 +455,7 @@ trait RDFoxConverter { val filler = e.getFiller val property = e.getProperty val expr = factory.getOWLDataSomeValuesFrom(property, filler) - convert(expr, term, unsafe, skolem, suffix) + convert(expr, term, unsafe, skolem, suffix)(fresh) } //case (_, sup: OWLObjectExactCardinality) => { @@ -475,7 +478,7 @@ trait RDFoxConverter { case i: OWLNamedIndividual => i.getIRI case i: OWLAnonymousIndividual => i.getID } - (List(convert(e.getProperty, term, term1, suffix)), List()) + (List(convert(e.getProperty, term, term1, suffix)(fresh)), List()) } /** Existential quantification with singleton literal filler @@ -484,7 +487,7 @@ trait RDFoxConverter { * 
[[http://www.w3.org/TR/owl2-syntax/#Literal_Value_Restriction]] */ case e: OWLDataHasValue => - (List(convert(e.getProperty, term, e.getFiller, suffix)), List()) + (List(convert(e.getProperty, term, e.getFiller, suffix)(fresh)), List()) case e: OWLObjectUnionOf => { (List(), List()) @@ -503,7 +506,7 @@ trait RDFoxConverter { term1: Term, term2: Term, suffix: RSASuffix - ): TupleTableAtom = + )(implicit fresh: DataFactory): TupleTableAtom = expr match { /** Simple named role/object property. @@ -524,7 +527,7 @@ trait RDFoxConverter { */ case e: OWLObjectInverseOf => //convert(e.getInverse, term1, term2, suffix + Inverse) - convert(e.getInverse, term2, term1, suffix) + convert(e.getInverse, term2, term1, suffix)(fresh) /** The infamous impossible case. * @@ -543,7 +546,7 @@ trait RDFoxConverter { term1: Term, term2: Term, suffix: RSASuffix - ): TupleTableAtom = + )(implicit fresh: DataFactory): TupleTableAtom = expr match { /** Simple named role/data property diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala index 09bfa1e..795e039 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala @@ -25,10 +25,9 @@ import tech.oxfordsemantic.jrdfox.logic.datalog.{ } import tech.oxfordsemantic.jrdfox.logic.expression.{IRI} -import uk.ac.ox.cs.rsacomb.RSAUtil import uk.ac.ox.cs.rsacomb.RSAOntology import uk.ac.ox.cs.rsacomb.suffix.{RSASuffix, Nth} -import uk.ac.ox.cs.rsacomb.util.RDFoxUtil +import uk.ac.ox.cs.rsacomb.util.{DataFactory, RDFoxUtil} /* Is this the best way to determine if an atom is an RDF triple? 
* Note that we can't use `getNumberOfArguments()` because is not @@ -91,11 +90,13 @@ object RSAAtom { TupleTableAtom.create(ttname, atom.getArguments()) } - lazy val reified: (Option[TupleTableAtom], List[TupleTableAtom]) = + def reified(implicit + fresh: DataFactory + ): (Option[TupleTableAtom], List[TupleTableAtom]) = if (isRDF) { (None, List(atom)) } else { - val varS = RSAUtil.genFreshVariable() + val varS = fresh.getVariable val skolem = RDFoxUtil.skolem(name, (args :+ varS): _*) val atom = TupleTableAtom.rdf(varS, IRI.RDF_TYPE, name) val atoms = args.zipWithIndex diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala index ba44605..1f44ce1 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala @@ -35,9 +35,7 @@ import tech.oxfordsemantic.jrdfox.logic.expression.{Resource, Term, Variable} import uk.ac.ox.cs.rsacomb.approximation.Approximation import uk.ac.ox.cs.rsacomb.converter._ import uk.ac.ox.cs.rsacomb.suffix._ -import uk.ac.ox.cs.rsacomb.util.{RDFoxUtil, RSA} - -import uk.ac.ox.cs.rsacomb.RSAUtil +import uk.ac.ox.cs.rsacomb.util.{DataFactory, RDFoxUtil, RSA} object Ontology { @@ -95,12 +93,13 @@ object Ontology { unsafe: List[OWLObjectPropertyExpression], skolem: SkolemStrategy, suffix: RSASuffix - ): Shards = + )(implicit fresh: DataFactory): Shards = (expr, skolem) match { case (e: OWLObjectSomeValuesFrom, c: Constant) => { nodemap.update(c.iri.getIRI, c.axiom) - val (res, ext) = super.convert(e, term, unsafe, skolem, suffix) + val (res, ext) = + super.convert(e, term, unsafe, skolem, suffix)(fresh) if (unsafe contains e.getProperty) (RSA.PE(term, c.iri) :: RSA.U(c.iri) :: res, ext) else @@ -109,19 +108,20 @@ object Ontology { case (e: OWLDataSomeValuesFrom, c: Constant) => { nodemap.update(c.iri.getIRI, c.axiom) - val (res, ext) = super.convert(e, term, unsafe, skolem, suffix) + val (res, ext) 
= + super.convert(e, term, unsafe, skolem, suffix)(fresh) if (unsafe contains e.getProperty) (RSA.PE(term, c.iri) :: RSA.U(c.iri) :: res, ext) else (RSA.PE(term, c.iri) :: res, ext) } - case _ => super.convert(expr, term, unsafe, skolem, suffix) + case _ => super.convert(expr, term, unsafe, skolem, suffix)(fresh) } } /* Ontology convertion into LP rules */ - val term = RSAUtil.genFreshVariable() + val term = Variable.create("X") val result = axioms.map(a => RSAConverter.convert(a, term, unsafe, new Constant(a), Empty) ) @@ -216,8 +216,6 @@ class Ontology(val axioms: List[OWLLogicalAxiom], val datafiles: List[File]) { /** Simplify conversion between Java and Scala collections */ import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ - println(s"Axioms: ${axioms.length}") - /** OWLOntology based on input axioms * * This is mainly used to instantiate a new reasoner to be used in diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/package.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/package.scala new file mode 100644 index 0000000..53fa095 --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/package.scala @@ -0,0 +1,21 @@ +/* + * Copyright 2020, 2021 KRR Oxford + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package uk.ac.ox.cs +package object rsacomb { + + implicit val seed: util.DataFactory = util.DataFactory(0) +} diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/DataFactory.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/DataFactory.scala new file mode 100644 index 0000000..848c6b5 --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/DataFactory.scala @@ -0,0 +1,29 @@ +package uk.ac.ox.cs.rsacomb.util + +import org.semanticweb.owlapi.apibinding.OWLManager +import org.semanticweb.owlapi.model.OWLClass +import tech.oxfordsemantic.jrdfox.logic.expression.Variable + +/** Simple fresh variable/class generator */ +object DataFactory { + + /** Manager instance to interface with OWLAPI */ + private val manager = OWLManager.createOWLOntologyManager() + private val factory = manager.getOWLDataFactory() + + def apply(counter: Integer = -1): DataFactory = new DataFactory(counter) +} + +class DataFactory(private var counter: Integer) { + + private def getNext(): Integer = { + counter += 1 + counter + } + + def getVariable(): Variable = + Variable.create(f"I${this.getNext()}%05d") + + def getOWLClass(): OWLClass = + DataFactory.factory.getOWLClass(s"X${this.getNext()}") +} -- cgit v1.2.3 From 0f52fea098088d8e90410b97213f60c52407f8ff Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Wed, 1 Sep 2021 13:14:23 +0100 Subject: Update RDFox library to 5.2.1 --- src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 1 + .../uk/ac/ox/cs/rsacomb/converter/Normalizer.scala | 17 +++++++++-------- src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala | 3 ++- 3 files changed, 12 insertions(+), 9 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index 869dd88..0c88f7f 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -518,6 +518,7 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], 
datafiles: List[File]) /* Generate `named` predicates */ RDFoxUtil.addFacts(data, (individuals ++ literals) map RSA.Named) data.evaluateUpdate( + null, // the base IRI for the query (if null, a default is used) RSA.Prefixes, "INSERT { ?X a rsa:Named } WHERE { ?X a owl:Thing }", new java.util.HashMap[String, String] diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala index c24f99d..33cb715 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/Normalizer.scala @@ -599,13 +599,14 @@ class Normalizer() { } /** Non supported axioms */ - private def notSupported(axiom: OWLLogicalAxiom): Seq[OWLLogicalAxiom] = - // Logger.print( - // s"'$axiom' has been ignored because it is not in Horn-ALCHOIQ", - // Logger.VERBOSE - // ) - // Seq() - throw new RuntimeException( - s"'$axiom' is not currently supported." + private def notSupported(axiom: OWLLogicalAxiom): Seq[OWLLogicalAxiom] = { + Logger.print( + s"'$axiom' has been ignored because it is not in Horn-ALCHOIQ", + Logger.VERBOSE ) + Seq() + // throw new RuntimeException( + // s"'$axiom' is not currently supported." 
+ // ) + } } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala index a9027cf..3d67270 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala @@ -84,8 +84,9 @@ object RDFoxUtil { val password = "" val server = ConnectionFactory.newServerConnection(serverUrl, role, password) + opts.put("type", "par-complex-nn") if (!server.containsDataStore(datastore)) - server.createDataStore(datastore, "par-complex-nn", opts) + server.createDataStore(datastore, opts) val data = server.newDataStoreConnection(datastore) (server, data) } -- cgit v1.2.3 From b1799a0af7bd243fb24bff66682f6f848557c27d Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Wed, 29 Sep 2021 15:16:25 +0100 Subject: Add JSON format for CQ answers --- .../ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala | 5 ++++- .../cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala | 20 +++++++++++++++++--- 2 files changed, 21 insertions(+), 4 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala index 37a21e7..c405008 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala @@ -32,12 +32,14 @@ import uk.ac.ox.cs.rsacomb.util.RDFoxUtil /** Factory for [[uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery]]. */ object ConjunctiveQuery { + private var idCounter: Int = 0; + /** Creates a new ConjunctiveQuery instance. 
* * @param query `SelectQuery` instance representing the actual query */ def apply(query: SelectQuery): ConjunctiveQuery = - new ConjunctiveQuery(query) + new ConjunctiveQuery({ idCounter += 1; idCounter }, query) /** Creates a new ConjunctiveQuery from a query string * @@ -66,6 +68,7 @@ object ConjunctiveQuery { * `SelectQuery` to be considered a conjunctive query. */ class ConjunctiveQuery( + val id: Int, query: SelectQuery, val prefixes: Prefixes = new Prefixes() ) { diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala index 4166655..3d16351 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala @@ -16,6 +16,7 @@ package uk.ac.ox.cs.rsacomb.sparql +import ujson._ import tech.oxfordsemantic.jrdfox.logic.expression.{ IRI, Literal, @@ -33,19 +34,32 @@ import tech.oxfordsemantic.jrdfox.logic.expression.{ * BCQs, and empty collection represents a ''false'', ''true'' otherwise. */ class ConjunctiveQueryAnswers( - bcq: Boolean, + val query: ConjunctiveQuery, val variables: Seq[Variable], val answers: Seq[(Long, Seq[Resource])] ) { /** Returns number of distinct answers. */ - val length: Int = if (bcq) 0 else answers.length + val length: Int = if (query.bcq) 0 else answers.length /** Returns number of answers taking into account multiplicity. 
*/ val lengthWithMultiplicity: Long = answers.map(_._1).sum + /** Serialise answers as JSON file */ + def toJSON(): ujson.Js.Value = { + ujson.Obj( + "queryID" -> query.id, + "queryText" -> query.toString + .split('\n') + .map(_.trim.filter(_ >= ' ')) + .mkString(" "), + "answerVariables" -> ujson.Arr(query.answer.map(_.toString())), + "answers" -> ujson.Arr(answers.map(_._2.mkString(" ")).sorted) + ) + } + override def toString(): String = - if (bcq) { + if (query.bcq) { if (answers.isEmpty) "FALSE" else "TRUE" } else { if (answers.isEmpty) -- cgit v1.2.3 From a5a4f5eba46c51c2a14e9915bfa34e78bff59030 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Wed, 29 Sep 2021 16:43:01 +0100 Subject: Add import of multiple queries --- src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala | 56 ++++++++++------------ .../scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 4 +- .../scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala | 50 +++++++++++++++---- 3 files changed, 70 insertions(+), 40 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala index a1fd20f..b1fd721 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala @@ -49,7 +49,7 @@ object RSAConfig { -h | -? | --help print this help message - -q | --query + -q | --queries path to a file containing a single SPARQL query. If no query is provided, only the approximation to RSA will be performed. 
@@ -97,11 +97,11 @@ object RSAConfig { println(help) sys.exit(0) } - case flag @ ("-q" | "--query") :: _query :: tail => { + case flag @ ("-q" | "--queries") :: _query :: tail => { val query = new File(_query) if (!query.isFile) exit(s"'$query' is not a valid filename.") - parse(tail, config ++ Map('query -> query)) + parse(tail, config ++ Map('queries -> query)) } case _ontology :: _data => { val ontology = new File(_ontology) @@ -132,36 +132,32 @@ object RSAComb extends App { config('data).get[List[File]] ).normalize(new Normalizer) + //ontology.axioms foreach println + /* Approximate the ontology to RSA */ val toRSA = new Upperbound val rsa = ontology approximate toRSA - if (config contains 'query) { - val query = - RDFoxUtil.loadQueryFromFile(config('query).get[File].getAbsoluteFile) - - ConjunctiveQuery.parse(query) match { - case Some(query) => { - // Retrieve answers - val answers = rsa ask query - Logger.print(s"$answers", Logger.VERBOSE) - Logger print s"Number of answers: ${answers.length} (${answers.lengthWithMultiplicity})" - // Retrieve unfiltered answers - // val unfiltered = rsa.queryDataStore( - // """ - // SELECT (count(?K) as ?COUNT) - // WHERE { - // ?K a rsa:QM . - // } - // """, - // RSA.Prefixes - // ) - // unfiltered.foreach((u) => - // Logger print s"Number of unfiltered answers: ${u.head._2}" - // ) - } - case None => - throw new RuntimeException("Submitted query is not conjunctive") - } + if (config contains 'queries) { + val queries = + RDFoxUtil.loadQueriesFromFile(config('queries).get[File].getAbsoluteFile) + + val answers = rsa ask queries + + // Logger.print(s"$answers", Logger.VERBOSE) + // Logger print s"Number of answers: ${answers.length} (${answers.lengthWithMultiplicity})" + // Retrieve unfiltered answers + // val unfiltered = rsa.queryDataStore( + // """ + // SELECT (count(?K) as ?COUNT) + // WHERE { + // ?K a rsa:QM . 
+ // } + // """, + // RSA.Prefixes + // ) + // unfiltered.foreach((u) => + // Logger print s"Number of unfiltered answers: ${u.head._2}" + // ) } } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index 0c88f7f..8e05f3a 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -496,6 +496,8 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) def unfold(axiom: OWLSubClassOfAxiom): Set[Term] = this.self(axiom) | this.cycle(axiom) + def ask(queries: Seq[ConjunctiveQuery]): Seq[ConjunctiveQueryAnswers] = ??? + /** Returns the answers to a query * * @param query query to execute @@ -553,7 +555,7 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) val ans = filter.answerQuery RDFoxUtil .submitQuery(data, ans, RSA.Prefixes) - .map(new ConjunctiveQueryAnswers(query.bcq, query.variables, _)) + .map(new ConjunctiveQueryAnswers(query, query.variables, _)) .get } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala index 3d67270..620d2dd 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala @@ -41,6 +41,7 @@ import tech.oxfordsemantic.jrdfox.logic.expression.{ Term } import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery +import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery import uk.ac.ox.cs.rsacomb.suffix.Nth import uk.ac.ox.cs.rsacomb.util.Logger @@ -177,15 +178,6 @@ object RDFoxUtil { def materialize(data: DataStoreConnection): Unit = Logger.timed(data.updateMaterialization(), "Materialization", Logger.DEBUG) - /** Load SPARQL query from file. 
*/ - def loadQueryFromFile(file: File): String = { - val source = io.Source.fromFile(file) - val query = source.getLines mkString "\n" - Logger print s"Loaded query:\n$query" - source.close() - query - } - /** Export data in `text/turtle`. * * @param data datastore connection from which to export data. @@ -206,6 +198,46 @@ object RDFoxUtil { ) } + /** Load SPARQL queries from file. + * + * The file can list multiple queries, each preceeded with a + * single line containing "#^[Query]" where "" is a number. + * Empty lines are ignored. + * + * @note if a query is not recognized as a [[SelectQuery]] by RDFox + * it is discarded. + * + * @param file file containing a list of conjunctive queries. + * @param prefixes additional prefixes for the query. It defaults to + * an empty set. + * + * @return a list of [[tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery SelectQuery]] queries. + */ + def loadQueriesFromFile( + file: File, + prefixes: Prefixes = new Prefixes() + ): List[ConjunctiveQuery] = { + val source = io.Source.fromFile(file) + val queries = source.getLines + .map(_.trim.filter(_ >= ' ')) + .filterNot(_ == "") + .foldRight((List.empty[List[String]], List.empty[String])) { + case (line, (acc, query)) => { + if ("^#\\^\\[Query\\d+\\]$".r.matches(line)) + (query :: acc, List.empty) + else + (acc, line :: query) + } + } + ._1 + .map(_.mkString(" ")) + .map(ConjunctiveQuery.parse(_, prefixes)) + .collect { case Some(q) => q } + Logger print s"Loaded ${queries.length} queries from ${file.getAbsolutePath}" + source.close() + queries + } + /** Parse a SELECT query from a string in SPARQL format. 
* * @param query the string containing the SPARQL query -- cgit v1.2.3 From a7e7f8a5d0bac2a12127c10c6e15e602e4cd43ad Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Wed, 29 Sep 2021 16:44:00 +0100 Subject: Always use internal equality instead of SAME_AS --- src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala index 266c158..276ee1a 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala @@ -340,7 +340,7 @@ trait RDFoxConverter { .collect { case x: OWLNamedIndividual => x } if (named.length != 1) throw new RuntimeException(s"Class expression '$e' has arity != 1.") - val atom = TupleTableAtom.rdf(term, IRI.SAME_AS, named.head.getIRI) + val atom = RSA.Congruent(term, named.head.getIRI) (List(atom), List()) } @@ -412,7 +412,7 @@ trait RDFoxConverter { val (res, ext) = vars.map(convert(cls, _, unsafe, skolem, suffix)(fresh)).unzip val props = vars.map(convert(role, term, _, suffix)(fresh)) - val eq = TupleTableAtom.rdf(y, IRI.SAME_AS, z) + val eq = RSA.Congruent(y, z) (List(eq), res.flatten ++ props) } -- cgit v1.2.3 From 95a2e9e85e1783e1bf2b50ae37bd9eab003a6ca8 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Wed, 29 Sep 2021 17:36:07 +0100 Subject: Write answers to output file --- src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala | 16 +++++++++++++++- .../ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala | 3 +-- 2 files changed, 16 insertions(+), 3 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala index b1fd721..c030301 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala @@ 
-16,7 +16,8 @@ package uk.ac.ox.cs.rsacomb -import java.io.File +import java.io.{File, PrintWriter} +import java.nio.file.{Path, Paths, InvalidPathException} import java.util.HashMap import scala.collection.JavaConverters._ import tech.oxfordsemantic.jrdfox.client.UpdateType @@ -97,6 +98,14 @@ object RSAConfig { println(help) sys.exit(0) } + case flag @ ("-o" | "--output") :: _output :: tail => + try { + val output = Paths.get(_output) + parse(tail, config ++ Map('output -> output)) + } catch { + case e: InvalidPathException => + exit(s"'${_output}' is not a valid filename.") + } case flag @ ("-q" | "--queries") :: _query :: tail => { val query = new File(_query) if (!query.isFile) @@ -144,6 +153,11 @@ object RSAComb extends App { val answers = rsa ask queries + /* Write answers to output file */ + val output = new PrintWriter(config('output).get[Path].toFile) + output.write(ujson.write(ujson.Arr(answers.map(_.toJSON)), indent = 4)) + output.close() + // Logger.print(s"$answers", Logger.VERBOSE) // Logger print s"Number of answers: ${answers.length} (${answers.lengthWithMultiplicity})" // Retrieve unfiltered answers diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala index 3d16351..5b97679 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQueryAnswers.scala @@ -46,7 +46,7 @@ class ConjunctiveQueryAnswers( val lengthWithMultiplicity: Long = answers.map(_._1).sum /** Serialise answers as JSON file */ - def toJSON(): ujson.Js.Value = { + def toJSON(): ujson.Js.Value = ujson.Obj( "queryID" -> query.id, "queryText" -> query.toString @@ -56,7 +56,6 @@ class ConjunctiveQueryAnswers( "answerVariables" -> ujson.Arr(query.answer.map(_.toString())), "answers" -> ujson.Arr(answers.map(_._2.mkString(" ")).sorted) ) - } override def toString(): String = if (query.bcq) { -- cgit 
v1.2.3 From 1ef8a2502532dd1736c1e3d6a1ff78ed6b8b643c Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Thu, 30 Sep 2021 12:32:25 +0100 Subject: Refactor query answering to use named graphs --- .../scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 176 ++++++++++++++------- .../ox/cs/rsacomb/filtering/FilteringProgram.scala | 3 + .../rsacomb/filtering/NaiveFilteringProgram.scala | 9 +- .../filtering/RevisedFilteringProgram.scala | 4 +- .../scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala | 38 +++-- 5 files changed, 148 insertions(+), 82 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index 8e05f3a..6e9a119 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -96,7 +96,10 @@ object RSAOntology { * @param query the query to derive the filtering program * @return the filtering program for the given query */ - def filteringProgram(query: ConjunctiveQuery): FilteringProgram = + def filteringProgram( + graph: String, + query: ConjunctiveQuery + ): FilteringProgram = Logger.timed( FilteringProgram(FilterType.REVISED)(query), "Generating filtering program", @@ -496,76 +499,131 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) def unfold(axiom: OWLSubClassOfAxiom): Set[Term] = this.self(axiom) | this.cycle(axiom) - def ask(queries: Seq[ConjunctiveQuery]): Seq[ConjunctiveQueryAnswers] = ??? - - /** Returns the answers to a query + /** Returns the answers to a collection of queries * - * @param query query to execute - * @return a collection of answers + * @param queries a sequence of conjunctive queries to answer. + * @return a collection of answers for each query. 
*/ - def ask(query: ConjunctiveQuery): ConjunctiveQueryAnswers = Logger.timed( - { - val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore) - val canon = this.canonicalModel - val filter = RSAOntology.filteringProgram(query) - - /* Upload data from data file */ - RDFoxUtil.addData(data, datafiles: _*) - - RDFoxUtil printStatisticsFor data - - /* Top / equality axiomatization */ - RDFoxUtil.addRules(data, topAxioms ++ equalityAxioms) - - /* Generate `named` predicates */ - RDFoxUtil.addFacts(data, (individuals ++ literals) map RSA.Named) - data.evaluateUpdate( - null, // the base IRI for the query (if null, a default is used) - RSA.Prefixes, - "INSERT { ?X a rsa:Named } WHERE { ?X a owl:Thing }", - new java.util.HashMap[String, String] - ) - - /* Add canonical model */ - Logger print s"Canonical model rules: ${canon.rules.length}" - RDFoxUtil.addRules(data, canon.rules) - - Logger print s"Canonical model facts: ${canon.facts.length}" - RDFoxUtil.addFacts(data, canon.facts) + def ask(queries: Seq[ConjunctiveQuery]): Seq[ConjunctiveQueryAnswers] = { + val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore) + val canonNamedGraph = "http://cs.ox.ac.uk/isg/RSAComb#CanonicalModel" + // Create a new NamedGraph for the canonical model + data.createTupleTable(canonNamedGraph, Map("type" -> "named-graph").asJava) + + /* Upload data from data file */ + RDFoxUtil.addData(canonNamedGraph, data, datafiles: _*) + /* Top / equality axiomatization */ + RDFoxUtil.addRules(data, topAxioms ++ equalityAxioms) + /* Generate `named` predicates */ + RDFoxUtil.addFacts( + canonNamedGraph, + data, + (individuals ++ literals) map RSA.Named + ) + data.evaluateUpdate( + null, // the base IRI for the query (if null, a default is used) + RSA.Prefixes, + s""" + INSERT { + GRAPH <$canonNamedGraph> { ?X a rsa:Named } + } WHERE { + GRAPH <$canonNamedGraph> { ?X a owl:Thing } + } + """, + new java.util.HashMap[String, String] + ) - RDFoxUtil printStatisticsFor data + /* Add 
canonical model */ + Logger print s"Canonical model rules: ${this.canonicalModel.rules.length}" + RDFoxUtil.addRules(data, this.canonicalModel.rules) - //{ - // import java.io.{PrintStream, FileOutputStream, File} - // val rules1 = new FileOutputStream(new File("rules1-lubm200.dlog")) - // val facts1 = new FileOutputStream(new File("facts1-lubm200.ttl")) - // RDFoxUtil.export(data, rules1, facts1) - // val rules2 = new PrintStream(new File("rules2-q34.dlog")) - // rules2.print(filter.rules.mkString("\n")) - //} + Logger print s"Canonical model facts: ${this.canonicalModel.facts.length}" + RDFoxUtil.addFacts(canonNamedGraph, data, this.canonicalModel.facts) - /* Add filtering program */ - Logger print s"Filtering program rules: ${filter.rules.length}" - RDFoxUtil.addRules(data, filter.rules) + queries map { query => + { + val filterNamedGraph = + s"http://cs.ox.ac.uk/isg/RSAComb#Filter${query.id}" + val filter = RSAOntology.filteringProgram(filterNamedGraph, query) + /* Add filtering program */ + Logger print s"Filtering program rules: ${filter.rules.length}" + RDFoxUtil.addRules(data, filter.rules) - RDFoxUtil printStatisticsFor data + // We remove the rules, should we drop the tuple table as well? 
+ data.clearRulesAxiomsExplicateFacts() - /* Gather answers to the query */ - val answers = { - val ans = filter.answerQuery + /* Gather answers to the query */ RDFoxUtil - .submitQuery(data, ans, RSA.Prefixes) + .submitQuery(data, filter.answerQuery, RSA.Prefixes) .map(new ConjunctiveQueryAnswers(query, query.variables, _)) .get } + } + } - RDFoxUtil.closeConnection(server, data) - - answers - }, - "Answers computation", - Logger.DEBUG - ) + //def ask(query: ConjunctiveQuery): ConjunctiveQueryAnswers = Logger.timed( + // { + // val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore) + // val canon = this.canonicalModel + // val filter = RSAOntology.filteringProgram(query) + + // /* Upload data from data file */ + // RDFoxUtil.addData(data, datafiles: _*) + + // RDFoxUtil printStatisticsFor data + + // /* Top / equality axiomatization */ + // RDFoxUtil.addRules(data, topAxioms ++ equalityAxioms) + + // /* Generate `named` predicates */ + // RDFoxUtil.addFacts(data, (individuals ++ literals) map RSA.Named) + // data.evaluateUpdate( + // null, // the base IRI for the query (if null, a default is used) + // RSA.Prefixes, + // "INSERT { ?X a rsa:Named } WHERE { ?X a owl:Thing }", + // new java.util.HashMap[String, String] + // ) + + // /* Add canonical model */ + // Logger print s"Canonical model rules: ${canon.rules.length}" + // RDFoxUtil.addRules(data, canon.rules) + + // Logger print s"Canonical model facts: ${canon.facts.length}" + // RDFoxUtil.addFacts(data, canon.facts) + + // RDFoxUtil printStatisticsFor data + + // //{ + // // import java.io.{PrintStream, FileOutputStream, File} + // // val rules1 = new FileOutputStream(new File("rules1-lubm200.dlog")) + // // val facts1 = new FileOutputStream(new File("facts1-lubm200.ttl")) + // // RDFoxUtil.export(data, rules1, facts1) + // // val rules2 = new PrintStream(new File("rules2-q34.dlog")) + // // rules2.print(filter.rules.mkString("\n")) + // //} + + // /* Add filtering program */ + // Logger print 
s"Filtering program rules: ${filter.rules.length}" + // RDFoxUtil.addRules(data, filter.rules) + + // RDFoxUtil printStatisticsFor data + + // /* Gather answers to the query */ + // val answers = { + // val ans = filter.answerQuery + // RDFoxUtil + // .submitQuery(data, ans, RSA.Prefixes) + // .map(new ConjunctiveQueryAnswers(query, query.variables, _)) + // .get + // } + + // RDFoxUtil.closeConnection(server, data) + + // answers + // }, + // "Answers computation", + // Logger.DEBUG + //) /** Query the RDFox data store used for query answering. * diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala index 2774cb1..d6ad8c5 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala @@ -46,6 +46,9 @@ object FilteringProgram extends Versioned[FilterType] { */ trait FilteringProgram { + /** Named graph used for filtering process */ + val graph: String + /** Query from which the filtering program is generated */ val query: ConjunctiveQuery diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala index 45dd867..3d9717c 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala @@ -36,8 +36,8 @@ object NaiveFilteringProgram { * * @param query CQ to be converted into logic rules. */ - def apply(query: ConjunctiveQuery): FilteringProgram = - new NaiveFilteringProgram(query) + def apply(graph: String, query: ConjunctiveQuery): FilteringProgram = + new NaiveFilteringProgram(graph, query) } /** Filtering Program generator @@ -47,7 +47,7 @@ object NaiveFilteringProgram { * * Instances can be created using the companion object. 
*/ -class NaiveFilteringProgram(val query: ConjunctiveQuery) +class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) extends FilteringProgram { /** Extends capabilities of @@ -321,5 +321,6 @@ class NaiveFilteringProgram(val query: ConjunctiveQuery) r9 :: List()) map RDFoxUtil.reify } - val answerQuery = RDFoxUtil.buildDescriptionQuery("Ans", query.answer.size) + val answerQuery = + RDFoxUtil.buildDescriptionQuery(graph, "Ans", query.answer.size) } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala index 4a4e65c..5d11369 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala @@ -422,12 +422,12 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) s""" SELECT $answer WHERE { - ?K a rsa:Ans . + GRAPH <$graph> { ?K a rsa:Ans } . TT { $answer $bounded ?K } . 
} """ } else { - "ASK { ?X a rsa:Ans }" + s"ASK { GRAPH <$graph> { ?X a rsa:Ans } }" } } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala index 620d2dd..6f5ff31 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala @@ -124,13 +124,14 @@ object RDFoxUtil { def addRules(data: DataStoreConnection, rules: Seq[Rule]): Unit = Logger.timed( if (rules.length > 0) { - data.importData( - UpdateType.ADDITION, - RSA.Prefixes, - rules - .map(_.toString(Prefixes.s_emptyPrefixes)) - .mkString("\n") - ) + data addRules rules + // data.importData( + // UpdateType.ADDITION, + // RSA.Prefixes, + // rules + // .map(_.toString(Prefixes.s_emptyPrefixes)) + // .mkString("\n") + // ) }, s"Loading ${rules.length} rules", Logger.DEBUG @@ -141,10 +142,15 @@ object RDFoxUtil { * @param data datastore connection * @param facts collection of facts to be added to the data store */ - def addFacts(data: DataStoreConnection, facts: Seq[TupleTableAtom]): Unit = + def addFacts( + graph: String, + data: DataStoreConnection, + facts: Seq[TupleTableAtom] + ): Unit = Logger.timed( if (facts.length > 0) { data.importData( + graph, UpdateType.ADDITION, RSA.Prefixes, facts @@ -161,14 +167,10 @@ object RDFoxUtil { * @param data datastore connection. * @param files sequence of files to upload. */ - def addData(data: DataStoreConnection, files: File*): Unit = + def addData(graph: String, data: DataStoreConnection, files: File*): Unit = Logger.timed( files.foreach { - data.importData( - UpdateType.ADDITION, - RSA.Prefixes, - _ - ) + data.importData(graph, UpdateType.ADDITION, RSA.Prefixes, _) }, "Loading data files", Logger.DEBUG @@ -315,11 +317,13 @@ object RDFoxUtil { * compatible with RDFox engine. 
This helper allows to build a query * to gather all instances of an internal predicate * + * @param graph named graph to query for the provided predicate * @param pred name of the predicate to describe. * @param arity arity of the predicate. * @return a string containing a SPARQL query. */ def buildDescriptionQuery( + graph: String, pred: String, arity: Int ): String = { @@ -328,12 +332,12 @@ object RDFoxUtil { s""" SELECT $variables WHERE { - ?K a rsa:$pred. - TT { $variables ?K } . + GRAPH <$graph> { ?K a rsa:$pred }. + TT { $variables ?K } . } """ } else { - "ASK { ?X a rsa:Ans }" + s"ASK { GRAPH <$graph> { ?X a rsa:Ans } }" } } -- cgit v1.2.3 From bc37ee9293d8a4098edce2a77db6efa3d87b6dd2 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Thu, 30 Sep 2021 18:10:01 +0100 Subject: Make canonical model generation parametric over named graph --- .../scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala | 35 +++++++----- .../scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 64 +++++++++++++++------- .../ox/cs/rsacomb/converter/RDFoxConverter.scala | 21 ++++--- .../uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala | 22 +++----- .../scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala | 29 ++++++++-- src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala | 53 ++++++++++-------- 6 files changed, 141 insertions(+), 83 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala index 3467d3c..a39b9c0 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala @@ -31,7 +31,8 @@ import tech.oxfordsemantic.jrdfox.logic.datalog.{ BodyFormula, Negation, Rule, - TupleTableAtom + TupleTableAtom, + TupleTableName } import tech.oxfordsemantic.jrdfox.logic.expression.{IRI, Term, Variable} @@ -48,8 +49,9 @@ import uk.ac.ox.cs.rsacomb.util.{DataFactory, RSA} * (via materialization). 
* * @param ontology the RSA ontology the canonical model is targeting. + * @param graph the graph the canonical model will be generated into. */ -class CanonicalModel(val ontology: RSAOntology) { +class CanonicalModel(val ontology: RSAOntology, val graph: IRI) { /** Simplify conversion between OWLAPI and RDFox concepts */ import implicits.RDFox._ @@ -65,6 +67,7 @@ class CanonicalModel(val ontology: RSAOntology) { * versions need to be explicitly stated in terms of logic rules. */ val rolesAdditionalRules: List[Rule] = { + val tt = TupleTableName.create(graph.getIRI) ontology.roles .collect { case prop: OWLObjectProperty => prop } .flatMap((pred) => { @@ -83,8 +86,8 @@ class CanonicalModel(val ontology: RSAOntology) { ) ) yield Rule.create( - TupleTableAtom.rdf(varX, iri :: hSuffix, varY), - TupleTableAtom.rdf(varX, iri :: bSuffix, varY) + TupleTableAtom.create(tt, varX, iri :: hSuffix, varY), + TupleTableAtom.create(tt, varX, iri :: bSuffix, varY) ) }) } @@ -108,6 +111,8 @@ class CanonicalModel(val ontology: RSAOntology) { object CanonicalModelConverter extends RDFoxConverter { + override val graph = TupleTableName.create(CanonicalModel.this.graph.getIRI) + private def rules1( axiom: OWLSubClassOfAxiom ): Result = { @@ -115,11 +120,10 @@ class CanonicalModel(val ontology: RSAOntology) { // Fresh Variables val v0 = RSA("v0_" ++ axiom.hashed) val varX = Variable.create("X") - implicit val unfoldTerm = RSA(unfold.hashCode.toString) // TODO: use axiom.toTriple instead val atomA: TupleTableAtom = { val cls = axiom.getSubClass.asInstanceOf[OWLClass].getIRI - TupleTableAtom.rdf(varX, IRI.RDF_TYPE, cls) + TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, cls) } val roleRf: TupleTableAtom = { val prop = @@ -132,12 +136,15 @@ class CanonicalModel(val ontology: RSAOntology) { .getFiller .asInstanceOf[OWLClass] .getIRI - TupleTableAtom.rdf(v0, IRI.RDF_TYPE, cls) + TupleTableAtom.create(graph, v0, IRI.RDF_TYPE, cls) } - val facts = unfold map RSA.In + val unfoldSet = 
RSA(unfold.hashCode.toString) + val facts = unfold.map(TupleTableAtom.create(graph, _, RSA.IN, unfoldSet)) + val notInX = + Negation.create(TupleTableAtom.create(graph, varX, RSA.IN, unfoldSet)) val rules = List( - Rule.create(roleRf, atomA, RSA.NotIn(varX)), - Rule.create(atomB, atomA, RSA.NotIn(varX)) + Rule.create(roleRf, atomA, notInX), + Rule.create(atomB, atomA, notInX) ) (facts, rules) } @@ -155,7 +162,7 @@ class CanonicalModel(val ontology: RSAOntology) { // Predicates def atomA(t: Term): TupleTableAtom = { val cls = axiom.getSubClass.asInstanceOf[OWLClass].getIRI - TupleTableAtom.rdf(t, IRI.RDF_TYPE, cls) + TupleTableAtom.create(graph, t, IRI.RDF_TYPE, cls) } def roleRf(t1: Term, t2: Term): TupleTableAtom = super.convert(roleR, t1, t2, Forward) @@ -165,7 +172,7 @@ class CanonicalModel(val ontology: RSAOntology) { .getFiller .asInstanceOf[OWLClass] .getIRI - TupleTableAtom.rdf(t, IRI.RDF_TYPE, cls) + TupleTableAtom.create(graph, t, IRI.RDF_TYPE, cls) } //Rules List( @@ -190,7 +197,7 @@ class CanonicalModel(val ontology: RSAOntology) { // Predicates def atomA(t: Term): TupleTableAtom = { val cls = axiom.getSubClass.asInstanceOf[OWLClass].getIRI - TupleTableAtom.rdf(t, IRI.RDF_TYPE, cls) + TupleTableAtom.create(graph, t, IRI.RDF_TYPE, cls) } def roleRf(t: Term): TupleTableAtom = super.convert(roleR, t, v1, Forward) @@ -200,7 +207,7 @@ class CanonicalModel(val ontology: RSAOntology) { .getFiller .asInstanceOf[OWLClass] .getIRI - TupleTableAtom.rdf(v1, IRI.RDF_TYPE, cls) + TupleTableAtom.create(graph, v1, IRI.RDF_TYPE, cls) } cycle.flatMap { x => List( diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index 6e9a119..5a89bf9 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -48,6 +48,7 @@ import tech.oxfordsemantic.jrdfox.Prefixes import tech.oxfordsemantic.jrdfox.logic.datalog.{ Rule, TupleTableAtom, + 
TupleTableName, Negation, BodyFormula } @@ -91,6 +92,20 @@ object RSAOntology { /** Name of the RDFox data store used for CQ answering */ private val DataStore = "answer_computation" + /** Canonical model named graph */ + private val CanonGraph: IRI = + RDFoxUtil.getNamedGraph(DataStore, "CanonicalModel") + + /** Filtering program named graph + * + * @param query query associated with the returned named graph. + * + * @return named graph for the filtering program associated with the + * input query. + */ + private def FilterGraph(query: ConjunctiveQuery): IRI = + RDFoxUtil.getNamedGraph(DataStore, s"Filter${query.id}") + /** Filtering program for a given query * * @param query the query to derive the filtering program @@ -343,11 +358,12 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) private val topAxioms: List[Rule] = { val varX = Variable.create("X") val varY = Variable.create("Y") + val graph = TupleTableName.create(RSAOntology.CanonGraph.getIRI) concepts .map(c => { Rule.create( - RSA.Thing(varX), - TupleTableAtom.rdf(varX, IRI.RDF_TYPE, c.getIRI) + TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, IRI.THING), + TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, c.getIRI) ) }) ++ roles.map(r => { val name = r match { @@ -356,8 +372,11 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) x.getInverse.getNamedProperty.getIRI.getIRIString :: Inverse } Rule.create( - List(RSA.Thing(varX), RSA.Thing(varY)), - List(TupleTableAtom.rdf(varX, name, varY)) + List( + TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, IRI.THING), + TupleTableAtom.create(graph, varY, IRI.RDF_TYPE, IRI.THING) + ), + List(TupleTableAtom.create(graph, varX, name, varY)) ) }) } @@ -382,23 +401,31 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) val varX = Variable.create("X") val varY = Variable.create("Y") val varZ = Variable.create("Z") - List( + val graph = TupleTableName.create(RSAOntology.CanonGraph.getIRI) + // Equality 
properties + val properties = List( // Reflexivity - Rule.create(RSA.Congruent(varX, varX), RSA.Thing(varX)), + Rule.create( + TupleTableAtom.create(graph, varX, RSA.CONGRUENT, varX), + TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, IRI.THING) + ), // Simmetry - Rule.create(RSA.Congruent(varY, varX), RSA.Congruent(varX, varY)), + Rule.create( + TupleTableAtom.create(graph, varY, RSA.CONGRUENT, varX), + TupleTableAtom.create(graph, varX, RSA.CONGRUENT, varY) + ), // Transitivity Rule.create( - RSA.Congruent(varX, varZ), - RSA.Congruent(varX, varY), - RSA.Congruent(varY, varZ) + TupleTableAtom.create(graph, varX, RSA.CONGRUENT, varZ), + TupleTableAtom.create(graph, varX, RSA.CONGRUENT, varY), + TupleTableAtom.create(graph, varY, RSA.CONGRUENT, varZ) ) ) } /** Canonical model of the ontology */ lazy val canonicalModel = Logger.timed( - new CanonicalModel(this), + new CanonicalModel(this, RSAOntology.CanonGraph), "Generating canonical model program", Logger.DEBUG ) @@ -505,19 +532,18 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) * @return a collection of answers for each query. */ def ask(queries: Seq[ConjunctiveQuery]): Seq[ConjunctiveQueryAnswers] = { + /* Open connection with RDFox server */ val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore) - val canonNamedGraph = "http://cs.ox.ac.uk/isg/RSAComb#CanonicalModel" - // Create a new NamedGraph for the canonical model - data.createTupleTable(canonNamedGraph, Map("type" -> "named-graph").asJava) /* Upload data from data file */ - RDFoxUtil.addData(canonNamedGraph, data, datafiles: _*) + RDFoxUtil.addData(data, RSAOntology.CanonGraph, datafiles: _*) /* Top / equality axiomatization */ RDFoxUtil.addRules(data, topAxioms ++ equalityAxioms) /* Generate `named` predicates */ + // TODO: do I need both to generate all NAMED atoms? 
RDFoxUtil.addFacts( - canonNamedGraph, data, + RSAOntology.CanonGraph, (individuals ++ literals) map RSA.Named ) data.evaluateUpdate( @@ -525,9 +551,9 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) RSA.Prefixes, s""" INSERT { - GRAPH <$canonNamedGraph> { ?X a rsa:Named } + GRAPH ${RSAOntology.CanonGraph} { ?X a ${RSA.NAMED} } } WHERE { - GRAPH <$canonNamedGraph> { ?X a owl:Thing } + GRAPH ${RSAOntology.CanonGraph} { ?X a ${IRI.THING} } } """, new java.util.HashMap[String, String] @@ -538,7 +564,7 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) RDFoxUtil.addRules(data, this.canonicalModel.rules) Logger print s"Canonical model facts: ${this.canonicalModel.facts.length}" - RDFoxUtil.addFacts(canonNamedGraph, data, this.canonicalModel.facts) + RDFoxUtil.addFacts(data, RSAOntology.CanonGraph, this.canonicalModel.facts) queries map { query => { diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala index 276ee1a..2f48798 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/converter/RDFoxConverter.scala @@ -24,7 +24,8 @@ import tech.oxfordsemantic.jrdfox.logic.datalog.{ BindAtom, BodyFormula, Rule, - TupleTableAtom + TupleTableAtom, + TupleTableName } import tech.oxfordsemantic.jrdfox.logic.expression.{Term, IRI, FunctionCall} import uk.ac.ox.cs.rsacomb.RSAOntology @@ -59,6 +60,10 @@ trait RDFoxConverter { private val manager = OWLManager.createOWLOntologyManager() private val factory = manager.getOWLDataFactory() + /** Default named graph to be used when generating new atoms */ + val graph: TupleTableName = + TupleTableName.create("http://oxfordsemantic.tech/RDFox#DefaultTriples") + /** Represents the result of the conversion of a * [[org.semanticweb.owlapi.model.OWLClassExpression OWLClassExpression]]. 
* @@ -193,7 +198,8 @@ trait RDFoxConverter { .flatMap((cls) => convert(cls, term, unsafe, NoSkolem, suffix)(fresh)._1 ) - val bottom = TupleTableAtom.rdf(term, IRI.RDF_TYPE, IRI.NOTHING) + val bottom = + TupleTableAtom.create(graph, term, IRI.RDF_TYPE, IRI.NOTHING) ResultR(List(Rule.create(bottom, body: _*))) } @@ -310,7 +316,7 @@ trait RDFoxConverter { */ case e: OWLClass => { val iri: IRI = if (e.isTopEntity()) IRI.THING else e.getIRI - val atom = TupleTableAtom.rdf(term, IRI.RDF_TYPE, iri) + val atom = TupleTableAtom.create(graph, term, IRI.RDF_TYPE, iri) (List(atom), List()) } @@ -340,7 +346,8 @@ trait RDFoxConverter { .collect { case x: OWLNamedIndividual => x } if (named.length != 1) throw new RuntimeException(s"Class expression '$e' has arity != 1.") - val atom = RSA.Congruent(term, named.head.getIRI) + val atom = + TupleTableAtom.create(graph, term, RSA.CONGRUENT, named.head.getIRI) (List(atom), List()) } @@ -412,7 +419,7 @@ trait RDFoxConverter { val (res, ext) = vars.map(convert(cls, _, unsafe, skolem, suffix)(fresh)).unzip val props = vars.map(convert(role, term, _, suffix)(fresh)) - val eq = RSA.Congruent(y, z) + val eq = TupleTableAtom.create(graph, y, RSA.CONGRUENT, z) (List(eq), res.flatten ++ props) } @@ -515,7 +522,7 @@ trait RDFoxConverter { */ case e: OWLObjectProperty => { val role = IRI.create(e.getIRI.getIRIString :: suffix) - TupleTableAtom.rdf(term1, role, term2) + TupleTableAtom.create(graph, term1, role, term2) } /** Inverse of a named role/property @@ -555,7 +562,7 @@ trait RDFoxConverter { */ case e: OWLDataProperty => { val role = IRI.create(e.getIRI.getIRIString :: suffix) - TupleTableAtom.rdf(term1, role, term2) + TupleTableAtom.create(graph, term1, role, term2) } /** The infamous impossible case. 
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala index 795e039..ff48f1f 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala @@ -51,19 +51,14 @@ object RSAAtom { import RDFox._ import JavaCollections._ - val name: String = atom.getTupleTableName.getName + val tt: TupleTableName = atom.getTupleTableName val args: List[Term] = atom.getArguments - val isRDF: Boolean = - name == "http://oxfordsemantic.tech/RDFox#DefaultTriples" + val isRDF: Boolean = atom.getArguments.length == 3 - val isClassAssertion: Boolean = { - isRDF && { - val pred = atom.getArguments.get(1) - pred == IRI.RDF_TYPE - } - } + val isClassAssertion: Boolean = + isRDF && atom.getArguments.get(1) == IRI.RDF_TYPE val isRoleAssertion: Boolean = isRDF && !isClassAssertion @@ -77,18 +72,15 @@ object RSAAtom { case iri: IRI => IRI.create(iri.getIRI :: suffix) case other => other } - TupleTableAtom.rdf(subj, pred, obj1) + TupleTableAtom.create(tt, subj, pred, obj1) } else { val pred1 = pred match { case iri: IRI => IRI.create(iri.getIRI :: suffix) case other => other } - TupleTableAtom.rdf(subj, pred1, obj) + TupleTableAtom.create(tt, subj, pred1, obj) } - } else { - val ttname = TupleTableName.create(name :: suffix) - TupleTableAtom.create(ttname, atom.getArguments()) - } + } else atom def reified(implicit fresh: DataFactory diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala index 6f5ff31..568858c 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala @@ -17,6 +17,7 @@ package uk.ac.ox.cs.rsacomb.util import java.io.{OutputStream, File, StringReader} +import scala.collection.JavaConverters._ import tech.oxfordsemantic.jrdfox.Prefixes import tech.oxfordsemantic.jrdfox.client.{ ComponentInfo, 
@@ -38,7 +39,8 @@ import tech.oxfordsemantic.jrdfox.logic.expression.{ Literal, Resource, Variable, - Term + Term, + IRI } import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery @@ -92,6 +94,22 @@ object RDFoxUtil { (server, data) } + /** Get the IRI of a named graph (creating it if necessary) + * + * @param datastore name of the datastore to perform the action in. + * @param name name of the named graph. + * + * @return the full IRI for the (new) named graph. + */ + def getNamedGraph(datastore: String, name: String): IRI = { + val graph = RSA(name) + val (server, data) = openConnection(datastore) + if (!data.containsTupleTable(graph.getIRI)) + data.createTupleTable(graph.getIRI, Map("type" -> "named-graph").asJava) + RDFoxUtil.closeConnection(server, data) + return graph + } + /** Create a built-in `rdfox:SKOLEM` TupleTableAtom. */ def skolem(name: String, terms: Term*): TupleTableAtom = TupleTableAtom.create( @@ -143,14 +161,14 @@ object RDFoxUtil { * @param facts collection of facts to be added to the data store */ def addFacts( - graph: String, data: DataStoreConnection, + graph: IRI, facts: Seq[TupleTableAtom] ): Unit = Logger.timed( if (facts.length > 0) { data.importData( - graph, + graph.getIRI, UpdateType.ADDITION, RSA.Prefixes, facts @@ -165,12 +183,13 @@ object RDFoxUtil { /** Imports a sequence of files directly into a datastore. * * @param data datastore connection. + * @param graph named graph where the data should be uploaded * @param files sequence of files to upload. 
*/ - def addData(graph: String, data: DataStoreConnection, files: File*): Unit = + def addData(data: DataStoreConnection, graph: IRI, files: File*): Unit = Logger.timed( files.foreach { - data.importData(graph, UpdateType.ADDITION, RSA.Prefixes, _) + data.importData(graph.getIRI, UpdateType.ADDITION, RSA.Prefixes, _) }, "Loading data files", Logger.DEBUG diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala index 8b341ba..96d3aa8 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala @@ -42,32 +42,29 @@ import scala.collection.JavaConverters._ object RSA { + /** Set of default prefixes to be included in all datastore operations */ val Prefixes: Prefixes = new Prefixes() - Prefixes.declarePrefix("rsa:", "http://www.cs.ox.ac.uk/isg/rsa/") + Prefixes.declarePrefix("rsacomb:", "http://www.cs.ox.ac.uk/isg/RSAComb#") + Prefixes.declarePrefix("rdfox:", "http://oxfordsemantic.tech/RDFox#") Prefixes.declarePrefix("owl:", "http://www.w3.org/2002/07/owl#") - val CONGRUENT = RSA("congruent") - val NAMED = RSA("Named") - - private def atom(name: IRI, vars: List[Term]): TupleTableAtom = - TupleTableAtom.create(TupleTableName.create(name.getIRI), vars: _*) - - def E(t1: Term, t2: Term) = - TupleTableAtom.rdf(t1, RSA("E"), t2) - - def PE(t1: Term, t2: Term) = - TupleTableAtom.rdf(t1, RSA("PE"), t2) + /** Creates a `rsacomb:` IRI */ + def apply(name: Any): IRI = + IRI.create( + Prefixes.getPrefixIRIsByPrefixName.get("rsacomb:").getIRI + name.toString + ) - def U(t: Term) = - TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("U")) + val NAMED = RSA("Named") + val CONGRUENT = RSA("congruent") + val IN = RSA("In") - def In(t: Term)(implicit set: Term) = - TupleTableAtom.rdf(t, RSA("In"), set) + // def In(t: Term)(implicit set: Term) = + // TupleTableAtom.rdf(t, RSA("In"), set) - def NotIn(t: Term)(implicit set: Term) = Negation.create(In(t)(set)) + // def NotIn(t: 
Term)(implicit set: Term) = Negation.create(In(t)(set)) - def Congruent(t1: Term, t2: Term) = - TupleTableAtom.rdf(t1, RSA("congruent"), t2) + // def Congruent(t1: Term, t2: Term) = + // TupleTableAtom.rdf(t1, RSA("congruent"), t2) def QM(implicit q: ConjunctiveQuery) = atom(RSA("QM"), q.answer ::: q.bounded) @@ -104,8 +101,18 @@ object RSA { atom(RSA("Ans"), q.answer) } - def apply(name: Any): IRI = - IRI.create( - Prefixes.getPrefixIRIsByPrefixName.get("rsa:").getIRI + name.toString - ) + /* TODO: review after reworking the dependency graph construction */ + + // private def atom(name: IRI, vars: List[Term]): TupleTableAtom = + // TupleTableAtom.create(TupleTableName.create(name.getIRI), vars: _*) + + def E(t1: Term, t2: Term) = + TupleTableAtom.rdf(t1, RSA("E"), t2) + + def PE(t1: Term, t2: Term) = + TupleTableAtom.rdf(t1, RSA("PE"), t2) + + def U(t: Term) = + TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("U")) + } -- cgit v1.2.3 From 360e10e686d144b918825939f48004aebc31b7f3 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Fri, 1 Oct 2021 16:16:31 +0100 Subject: Rework naive filtering program computation to use named graphs --- .../scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 16 +- .../ox/cs/rsacomb/filtering/FilteringProgram.scala | 33 +++- .../rsacomb/filtering/NaiveFilteringProgram.scala | 196 +++++++++++++++++---- .../uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala | 26 +-- .../ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala | 68 ++++--- .../scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala | 53 +----- src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala | 72 +++++--- 7 files changed, 298 insertions(+), 166 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index 5a89bf9..6a3dca2 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -111,12 +111,13 @@ object RSAOntology { * @param query 
the query to derive the filtering program * @return the filtering program for the given query */ - def filteringProgram( - graph: String, - query: ConjunctiveQuery - ): FilteringProgram = + def filteringProgram(query: ConjunctiveQuery): FilteringProgram = Logger.timed( - FilteringProgram(FilterType.REVISED)(query), + { + val filter = + FilteringProgram(FilterType.REVISED, CanonGraph, FilterGraph(query)) + filter(query) + }, "Generating filtering program", Logger.DEBUG ) @@ -568,9 +569,8 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) queries map { query => { - val filterNamedGraph = - s"http://cs.ox.ac.uk/isg/RSAComb#Filter${query.id}" - val filter = RSAOntology.filteringProgram(filterNamedGraph, query) + //val graph = RSAOntology.FilterGraph(query) + val filter = RSAOntology.filteringProgram(query) /* Add filtering program */ Logger print s"Filtering program rules: ${filter.rules.length}" RDFoxUtil.addRules(data, filter.rules) diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala index d6ad8c5..3015def 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala @@ -17,25 +17,43 @@ package uk.ac.ox.cs.rsacomb.filtering import tech.oxfordsemantic.jrdfox.logic.datalog.Rule +import tech.oxfordsemantic.jrdfox.logic.expression.IRI import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery import uk.ac.ox.cs.rsacomb.util.Versioned +/** Type of filtering strategy. + * + * Mainly for testing different approaches and techniques. 
+ */ sealed trait FilterType object FilterType { case object NAIVE extends FilterType case object REVISED extends FilterType } +/** Filtering program trait */ object FilteringProgram extends Versioned[FilterType] { import FilterType._ type Result = (ConjunctiveQuery) => FilteringProgram - def apply(t: FilterType): (ConjunctiveQuery) => FilteringProgram = - t match { - case NAIVE => NaiveFilteringProgram(_) - case REVISED => RevisedFilteringProgram(_) + /** Returns the right type of filtering program builder. + * + * @param filter type of filtering program. + * @param source source named graph for the filtering program. + * @param target target named graph for the filtering program. + * + * @return the right type of filtering program builder. + */ + def apply( + filter: FilterType, + source: IRI, + target: IRI + ): (ConjunctiveQuery) => FilteringProgram = + filter match { + case NAIVE => NaiveFilteringProgram(source, target, _) + case REVISED => RevisedFilteringProgram(source, target, _) } } @@ -46,8 +64,11 @@ object FilteringProgram extends Versioned[FilterType] { */ trait FilteringProgram { - /** Named graph used for filtering process */ - val graph: String + /** Source named graph for the filtering process */ + val source: IRI + + /** Target named graph for the filtering process */ + val target: IRI /** Query from which the filtering program is generated */ val query: ConjunctiveQuery diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala index 3d9717c..1777713 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala @@ -21,23 +21,35 @@ import tech.oxfordsemantic.jrdfox.logic.Datatype import tech.oxfordsemantic.jrdfox.logic.datalog.{ Rule, TupleTableAtom, + TupleTableName, BodyFormula, Negation } -import 
tech.oxfordsemantic.jrdfox.logic.expression.{Term, Variable} +import tech.oxfordsemantic.jrdfox.logic.expression.{ + IRI, + Literal, + Term, + Variable +} import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery -import uk.ac.ox.cs.rsacomb.suffix.{Forward, Backward} -import uk.ac.ox.cs.rsacomb.util.{RSA, RDFoxUtil} +import uk.ac.ox.cs.rsacomb.suffix.{Forward, Backward, Nth} +import uk.ac.ox.cs.rsacomb.util.{DataFactory, RSA, RDFoxUtil} /** Factory for [[uk.ac.ox.cs.rsacomb.FilteringProgram FilteringProgram]] */ object NaiveFilteringProgram { /** Create a new FilteringProgram instance. * + * @param source source named graph for the filtering program. + * @param target target named graph for the filtering program. * @param query CQ to be converted into logic rules. */ - def apply(graph: String, query: ConjunctiveQuery): FilteringProgram = - new NaiveFilteringProgram(graph, query) + def apply( + source: IRI, + target: IRI, + query: ConjunctiveQuery + ): FilteringProgram = + new NaiveFilteringProgram(source, target, query) } /** Filtering Program generator @@ -47,14 +59,23 @@ object NaiveFilteringProgram { * * Instances can be created using the companion object. */ -class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) - extends FilteringProgram { +class NaiveFilteringProgram( + val source: IRI, + val target: IRI, + val query: ConjunctiveQuery +) extends FilteringProgram { /** Extends capabilities of * [[tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableAtom TupleTableAtom]] */ import uk.ac.ox.cs.rsacomb.implicits.RSAAtom._ + /** Simplify conversion to RDFox specific types */ + import uk.ac.ox.cs.rsacomb.implicits.RDFox._ + + /** Simplify conversion between Java and Scala `List`s */ + import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ + /** Implicit parameter used in RSA internal predicates. * * @see [[uk.ac.ox.cs.rsacomb.util.RSA]] for more information. 
@@ -72,6 +93,13 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) private val varU = Variable.create("U") private val varW = Variable.create("W") + /** `TupleTableName`s for the source/targer named graphs */ + val tts: TupleTableName = TupleTableName.create(source.getIRI) + implicit val ttt: TupleTableName = TupleTableName.create(target.getIRI) + + /** Set of atoms in the body of the query */ + val queryBody: List[TupleTableAtom] = query.atoms(tts) + /** Rule generating the instances of the predicate `rsa:NI`. * * According to the original paper, the set of `rsa:NI` is defined as @@ -88,7 +116,11 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) * generate in the filtering program using a logic rule. */ val nis: Rule = - Rule.create(RSA.NI(varX), RSA.Congruent(varX, varY), RSA.Named(varY)) + Rule.create( + RSA.NI(varX), + RSA.Congruent(varX, varY)(tts), + RSA.Named(varY)(tts) + ) /** Collection of filtering program rules. */ val rules: List[Rule] = @@ -101,7 +133,7 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) * * @note corresponds to rule 1 in Table 3 in the paper. */ - val r1 = Rule.create(RSA.QM, query.atoms: _*) + val r1 = Rule.create(RSA.QM, queryBody: _*) /** Initializes instances of `rsa:ID`. * @@ -123,10 +155,10 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) * @note corresponds to rules 4x in Table 3. 
*/ val r4a = for { - role1 <- query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) index1 = query.bounded indexOf (role1.getArguments get 2) if index1 >= 0 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) index2 = query.bounded indexOf (role2.getArguments get 2) if index2 >= 0 } yield Rule.create( @@ -134,13 +166,20 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) RSA.ID(RSA(index1), RSA(index2)), role1 << Forward, role2 << Forward, - not(RSA.Congruent(role1.getArguments get 0, role2.getArguments get 0)) + not( + TupleTableAtom.create( + tts, + role1.getArguments get 0, + RSA.CONGRUENT, + role2.getArguments get 0 + ) + ) ) val r4b = for { - role1 <- query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) index1 = query.bounded indexOf (role1.getArguments get 2) if index1 >= 0 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) index2 = query.bounded indexOf (role2.getArguments get 0) if index2 >= 0 } yield Rule.create( @@ -148,13 +187,20 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) RSA.ID(RSA(index1), RSA(index2)), role1 << Forward, role2 << Backward, - not(RSA.Congruent(role1.getArguments get 0, role2.getArguments get 2)) + not( + TupleTableAtom.create( + tts, + role1.getArguments get 0, + RSA.CONGRUENT, + role2.getArguments get 2 + ) + ) ) val r4c = for { - role1 <- query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) index1 = query.bounded indexOf (role1.getArguments get 0) if index1 >= 0 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) index2 = query.bounded indexOf (role2.getArguments get 0) if index2 >= 0 } yield Rule.create( @@ -162,7 +208,14 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) RSA.ID(RSA(index1), RSA(index2)), 
role1 << Backward, role2 << Backward, - not(RSA.Congruent(role1.getArguments get 2, role2.getArguments get 2)) + not( + TupleTableAtom.create( + tts, + role1.getArguments get 2, + RSA.CONGRUENT, + role2.getArguments get 2 + ) + ) ) /** Recursively propagates `rsa:ID` predicate. @@ -170,12 +223,12 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) * @note corresponds to rules 5x in Table 3. */ val r5a = for { - role1 <- query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) r1arg0 = role1.getArguments get 0 if query.bounded contains r1arg0 r1arg2 = role1.getArguments get 2 if query.bounded contains r1arg2 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) r2arg0 = role2.getArguments get 0 if query.bounded contains r2arg0 r2arg2 = role2.getArguments get 2 @@ -189,18 +242,18 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) RSA(query.bounded indexOf r1arg2), RSA(query.bounded indexOf r2arg2) ), - RSA.Congruent(r1arg0, r2arg0), + TupleTableAtom.create(tts, r1arg0, RSA.CONGRUENT, r2arg0), role1 << Forward, role2 << Forward, not(RSA.NI(r1arg0)) ) val r5b = for { - role1 <- query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) r1arg0 = role1.getArguments get 0 if query.bounded contains r1arg0 r1arg2 = role1.getArguments get 2 if query.bounded contains r1arg2 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) r2arg0 = role2.getArguments get 0 if query.bounded contains r2arg0 r2arg2 = role2.getArguments get 2 @@ -214,18 +267,18 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) RSA(query.bounded indexOf r1arg2), RSA(query.bounded indexOf r2arg0) ), - RSA.Congruent(r1arg0, r2arg2), + TupleTableAtom.create(tts, r1arg0, RSA.CONGRUENT, r2arg2), role1 << Forward, role2 << Backward, not(RSA.NI(r1arg0)) ) val r5c = for { - role1 <- 
query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) r1arg0 = role1.getArguments get 0 if query.bounded contains r1arg0 r1arg2 = role1.getArguments get 2 if query.bounded contains r1arg2 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) r2arg0 = role2.getArguments get 0 if query.bounded contains r2arg0 r2arg2 = role2.getArguments get 2 @@ -239,7 +292,7 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) RSA(query.bounded indexOf r1arg0), RSA(query.bounded indexOf r2arg0) ), - RSA.Congruent(r1arg2, r2arg2), + TupleTableAtom.create(tts, r1arg2, RSA.CONGRUENT, r2arg2), role1 << Backward, role2 << Backward, not(RSA.NI(r1arg2)) @@ -254,14 +307,14 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) * @note corresponds to rules 6,7x in Table 3. */ val r6 = for { - role <- query.atoms filter (_.isRoleAssertion) + role <- queryBody filter (_.isRoleAssertion) index0 = query.bounded indexOf (role.getArguments get 0) if index0 >= 0 index2 = query.bounded indexOf (role.getArguments get 2) if index2 >= 0 suffix <- Seq(Forward, Backward) } yield Rule.create( - RSA.AQ(varV, varW, suffix), + RSA.AQ(suffix, varV, varW), role << suffix, RSA.ID(RSA(index0), varV), RSA.ID(RSA(index2), varW) @@ -269,15 +322,15 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) val r7a = for (suffix <- List(Forward, Backward)) yield Rule.create( - RSA.TQ(varU, varV, suffix), - RSA.AQ(varU, varV, suffix) + RSA.TQ(suffix, varU, varV), + RSA.AQ(suffix, varU, varV) ) val r7b = for (suffix <- List(Forward, Backward)) yield Rule.create( - RSA.TQ(varU, varW, suffix), - RSA.AQ(varU, varV, suffix), - RSA.TQ(varV, varW, suffix) + RSA.TQ(suffix, varU, varW), + RSA.AQ(suffix, varU, varV), + RSA.TQ(suffix, varV, varW) ) /** Flag spurious answers. 
@@ -286,13 +339,19 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) */ val r8a = for (v <- query.answer) - yield Rule.create(RSA.SP, RSA.QM, not(RSA.Named(v))) + yield Rule.create( + RSA.SP, + RSA.QM, + not( + TupleTableAtom.create(tts, v, IRI.RDF_TYPE, RSA.NAMED) + ) + ) val r8b = Rule.create(RSA.SP, RSA.FK) val r8c = for (suffix <- List(Forward, Backward)) yield Rule.create( RSA.SP, - RSA.TQ(varV, varV, suffix) + RSA.TQ(suffix, varV, varV) ) /** Determine answers to the query @@ -318,9 +377,70 @@ class NaiveFilteringProgram(val graph: String, val query: ConjunctiveQuery) r5a ::: r5b ::: r5c ::: r6 ::: r7b ::: r7a ::: r8a ::: r8b :: r8c ::: - r9 :: List()) map RDFoxUtil.reify + r9 :: List()) map reify } + /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.Rule Rule]]. + * + * This is needed because RDFox supports only predicates of arity 1 + * or 2, but the filtering program uses predicates with higher arity. + * + * @note we can perform a reification of the atoms thanks to the + * built-in `SKOLEM` funtion of RDFox. + */ + def reify(rule: Rule): Rule = { + val (sk, as) = rule.getHead.map(reify).unzip + val head: List[TupleTableAtom] = as.flatten + val skolem: List[BodyFormula] = sk.flatten + val body: List[BodyFormula] = rule.getBody.map(reify).flatten + Rule.create(head, skolem ::: body) + } + + /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.BodyFormula BodyFormula]]. 
*/ + private def reify(formula: BodyFormula): List[BodyFormula] = { + formula match { + case atom: TupleTableAtom => reify(atom)._2 + case neg: Negation => { + val (sk, as) = neg.getNegatedAtoms + .map({ + case a: TupleTableAtom => reify(a) + case a => (None, List(a)) + }) + .unzip + val skolem = + sk.flatten.map(_.getArguments.last).collect { case v: Variable => v } + val atoms = as.flatten + List(Negation.create(skolem, atoms)) + } + case other => List(other) + } + } + + /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableAtom TupleTableAtom]]. */ + private def reify(atom: TupleTableAtom)(implicit + fresh: DataFactory + ): (Option[TupleTableAtom], List[TupleTableAtom]) = { + if (atom.getArguments.length == 3) { + (None, List(atom)) + } else { + val varS: Variable = fresh.getVariable + val (pred :: args): List[Term] = atom.getArguments + val name = pred.asInstanceOf[IRI].getIRI + val skolem = TupleTableAtom.create( + TupleTableName.SKOLEM, + Literal.create(name, Datatype.XSD_STRING) +: args :+ varS + ) + val triple = + TupleTableAtom.create(atom.getTupleTableName, varS, IRI.RDF_TYPE, pred) + val triples = args.zipWithIndex + .map { case (a, i) => + TupleTableAtom.create(atom.getTupleTableName, varS, name :: Nth(i), a) + } + (Some(skolem), triple :: triples) + } + } + val answerQuery = - RDFoxUtil.buildDescriptionQuery(graph, "Ans", query.answer.size) + RDFoxUtil.buildDescriptionQuery(target, RSA.ANS, query.answer.size) + } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala index ff48f1f..37c70df 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala @@ -82,18 +82,18 @@ object RSAAtom { } } else atom - def reified(implicit - fresh: DataFactory - ): (Option[TupleTableAtom], List[TupleTableAtom]) = - if (isRDF) { - (None, List(atom)) - } else { - val varS = fresh.getVariable - val skolem = 
RDFoxUtil.skolem(name, (args :+ varS): _*) - val atom = TupleTableAtom.rdf(varS, IRI.RDF_TYPE, name) - val atoms = args.zipWithIndex - .map { case (a, i) => TupleTableAtom.rdf(varS, name :: Nth(i), a) } - (Some(skolem), atom :: atoms) - } + // def reified(implicit + // fresh: DataFactory + // ): (Option[TupleTableAtom], List[TupleTableAtom]) = + // if (isRDF) { + // (None, List(atom)) + // } else { + // val varS = fresh.getVariable + // val skolem = RDFoxUtil.skolem(name, (args :+ varS): _*) + // val atom = TupleTableAtom.rdf(varS, IRI.RDF_TYPE, name) + // val atoms = args.zipWithIndex + // .map { case (a, i) => TupleTableAtom.rdf(varS, name :: Nth(i), a) } + // (Some(skolem), atom :: atoms) + // } } } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala index c405008..73da80f 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala @@ -19,7 +19,7 @@ package uk.ac.ox.cs.rsacomb.sparql import java.util.{Map => JMap, HashMap => JHashMap} import tech.oxfordsemantic.jrdfox.Prefixes import tech.oxfordsemantic.jrdfox.client.DataStoreConnection -import tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableAtom +import tech.oxfordsemantic.jrdfox.logic.datalog.{TupleTableAtom, TupleTableName} import tech.oxfordsemantic.jrdfox.logic.expression.Variable import tech.oxfordsemantic.jrdfox.logic.sparql.pattern.{ ConjunctionPattern, @@ -99,37 +99,51 @@ class ConjunctiveQuery( val bcq: Boolean = select.isEmpty && !query.getAllPossibleVariables /** Returns the query body as a sequence of atoms (triples). 
*/ - val atoms: List[TupleTableAtom] = - where match { - case b: ConjunctionPattern => { - b.getConjuncts.toList.flatMap { conj: QueryPattern => - conj match { - case c: TriplePattern => - Seq( - TupleTableAtom.rdf(c.getSubject, c.getPredicate, c.getObject) - ) - case _ => List() - } - } + def atoms(graph: TupleTableName): List[TupleTableAtom] = + where.collect { case c: ConjunctionPattern => + c.getConjuncts.collect { case t: TriplePattern => + TupleTableAtom + .create(graph, t.getSubject, t.getPredicate, t.getObject) } - case _ => List() - } + }.flatten + // where match { + // case b: ConjunctionPattern => { + // b.getConjuncts.toList.flatMap { conj: QueryPattern => + // conj match { + // case c: TriplePattern => + // Seq( + // TupleTableAtom.rdf(c.getSubject, c.getPredicate, c.getObject) + // ) + // case _ => List() + // } + // } + // } + // case _ => List() + // } /** Returns the full collection of variables involved in the query. */ - val variables: List[Variable] = (where match { - case b: ConjunctionPattern => { - b.getConjuncts.toList.flatMap { conj: QueryPattern => - conj match { - case c: TriplePattern => - Set(c.getSubject, c.getPredicate, c.getObject).collect { - case v: Variable => v - } - case _ => List() + val variables: List[Variable] = + where.collect { case c: ConjunctionPattern => + c.getConjuncts.collect { case t: TriplePattern => + Set(t.getSubject, t.getPredicate, t.getObject).collect { + case v: Variable => v } } - } - case _ => List() - }).distinct + }.distinct + // (where match { + // case b: ConjunctionPattern => { + // b.getConjuncts.toList.flatMap { conj: QueryPattern => + // conj match { + // case c: TriplePattern => + // Set(c.getSubject, c.getPredicate, c.getObject).collect { + // case v: Variable => v + // } + // case _ => List() + // } + // } + // } + // case _ => List() + // }).distinct /** Returns the collection of answer variables in the query. 
*/ val answer: List[Variable] = diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala index 568858c..217fa7f 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala @@ -342,8 +342,8 @@ object RDFoxUtil { * @return a string containing a SPARQL query. */ def buildDescriptionQuery( - graph: String, - pred: String, + graph: IRI, + pred: IRI, arity: Int ): String = { if (arity > 0) { @@ -351,55 +351,12 @@ object RDFoxUtil { s""" SELECT $variables WHERE { - GRAPH <$graph> { ?K a rsa:$pred }. - TT { $variables ?K } . + GRAPH $graph { ?K a $pred }. + TT ${TupleTableName.SKOLEM} { $variables ?K } . } """ } else { - s"ASK { GRAPH <$graph> { ?X a rsa:Ans } }" - } - } - - /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.Rule Rule]]. - * - * This is needed because RDFox supports only predicates of arity 1 - * or 2, but the filtering program uses predicates with higher arity. - * - * @note we can perform a reification of the atoms thanks to the - * built-in `SKOLEM` funtion of RDFox. - */ - def reify(rule: Rule): Rule = { - val (sk, as) = rule.getHead.map(_.reified).unzip - val head: List[TupleTableAtom] = as.flatten - val skolem: List[BodyFormula] = sk.flatten - val body: List[BodyFormula] = rule.getBody.map(reify).flatten - Rule.create(head, skolem ::: body) - } - - /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.BodyFormula BodyFormula]]. - * - * This is needed because RDFox supports only predicates of arity 1 - * or 2, but the filtering program uses predicates with higher arity. - * - * @note we can perform a reification of the atoms thanks to the - * built-in `SKOLEM` funtion of RDFox. 
- */ - private def reify(formula: BodyFormula): List[BodyFormula] = { - formula match { - case atom: TupleTableAtom => atom.reified._2 - case neg: Negation => { - val (sk, as) = neg.getNegatedAtoms - .map({ - case a: TupleTableAtom => a.reified - case a => (None, List(a)) - }) - .unzip - val skolem = - sk.flatten.map(_.getArguments.last).collect { case v: Variable => v } - val atoms = as.flatten - List(Negation.create(skolem, atoms)) - } - case other => List(other) + s"ASK { GRAPH $graph { ?X a $pred } }" } } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala index 96d3aa8..40c5ced 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala @@ -42,6 +42,9 @@ import scala.collection.JavaConverters._ object RSA { + /** Simplify conversion between Java and Scala `List`s */ + import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ + /** Set of default prefixes to be included in all datastore operations */ val Prefixes: Prefixes = new Prefixes() Prefixes.declarePrefix("rsacomb:", "http://www.cs.ox.ac.uk/isg/RSAComb#") @@ -54,52 +57,69 @@ object RSA { Prefixes.getPrefixIRIsByPrefixName.get("rsacomb:").getIRI + name.toString ) - val NAMED = RSA("Named") + val ANS = RSA("Ans") val CONGRUENT = RSA("congruent") val IN = RSA("In") + val NAMED = RSA("Named") // def In(t: Term)(implicit set: Term) = // TupleTableAtom.rdf(t, RSA("In"), set) // def NotIn(t: Term)(implicit set: Term) = Negation.create(In(t)(set)) - // def Congruent(t1: Term, t2: Term) = - // TupleTableAtom.rdf(t1, RSA("congruent"), t2) + def Congruent(t1: Term, t2: Term)(implicit graph: TupleTableName) = + TupleTableAtom.create(graph, t1, RSA.CONGRUENT, t2) - def QM(implicit q: ConjunctiveQuery) = - atom(RSA("QM"), q.answer ::: q.bounded) + def Named(term: Term)(implicit graph: TupleTableName) = + TupleTableAtom.create(graph, term, IRI.RDF_TYPE, RSA.NAMED) - def ID(t1: Term, t2: Term)(implicit 
q: ConjunctiveQuery) = { - atom(RSA("ID"), (q.answer ::: q.bounded) :+ t1 :+ t2) - } + def QM(implicit query: ConjunctiveQuery, graph: TupleTableName) = + TupleTableAtom.create(graph, RSA("QM") :: query.answer ::: query.bounded) - def Named(t: Term) = - TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("Named")) + def ID(t1: Term, t2: Term)(implicit + query: ConjunctiveQuery, + graph: TupleTableName + ) = + TupleTableAtom.create( + graph, + RSA("ID") +: (query.answer ::: query.bounded) :+ t1 :+ t2 + ) - def Thing(t: Term) = - TupleTableAtom.rdf(t, IRI.RDF_TYPE, IRI.THING) + // def Thing(t: Term) = + // TupleTableAtom.rdf(t, IRI.RDF_TYPE, IRI.THING) - def NI(t: Term) = - TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("NI")) + def NI(term: Term)(implicit graph: TupleTableName) = + TupleTableAtom.create(graph, term, IRI.RDF_TYPE, RSA("NI")) - def TQ(t1: Term, t2: Term, sx: RSASuffix)(implicit q: ConjunctiveQuery) = - atom(RSA("TQ" :: sx), (q.answer ::: q.bounded) :+ t1 :+ t2) + def TQ(sx: RSASuffix, t1: Term, t2: Term)(implicit + query: ConjunctiveQuery, + graph: TupleTableName + ) = + TupleTableAtom.create( + graph, + RSA("TQ" :: sx) +: (query.answer ::: query.bounded) :+ t1 :+ t2 + ) - def AQ(t1: Term, t2: Term, sx: RSASuffix)(implicit q: ConjunctiveQuery) = - atom(RSA("AQ" :: sx), (q.answer ::: q.bounded) :+ t1 :+ t2) + def AQ(sx: RSASuffix, t1: Term, t2: Term)(implicit + query: ConjunctiveQuery, + graph: TupleTableName + ) = + TupleTableAtom.create( + graph, + RSA("AQ" :: sx) +: (query.answer ::: query.bounded) :+ t1 :+ t2 + ) - def FK(implicit q: ConjunctiveQuery) = - atom(RSA("FK"), q.answer ::: q.bounded) + def FK(implicit query: ConjunctiveQuery, graph: TupleTableName) = + TupleTableAtom.create(graph, RSA("FK") :: query.answer ::: query.bounded) - def SP(implicit q: ConjunctiveQuery) = - atom(RSA("SP"), q.answer ::: q.bounded) + def SP(implicit q: ConjunctiveQuery, graph: TupleTableName) = + TupleTableAtom.create(graph, RSA("SP") :: q.answer ::: q.bounded) - def Ans(implicit 
q: ConjunctiveQuery) = { + def Ans(implicit q: ConjunctiveQuery, graph: TupleTableName) = if (q.bcq) - TupleTableAtom.rdf(RSA("blank"), IRI.RDF_TYPE, RSA("Ans")) + TupleTableAtom.create(graph, RSA("blank"), IRI.RDF_TYPE, RSA.ANS) else - atom(RSA("Ans"), q.answer) - } + TupleTableAtom.create(graph, RSA.ANS :: q.answer) /* TODO: review after reworking the dependency graph construction */ -- cgit v1.2.3 From 424d5d6fcabb410622907c095364903577014765 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Sat, 2 Oct 2021 09:15:43 +0100 Subject: Rework revides filtering program computation to use named graphs --- .../scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 7 +- .../rsacomb/filtering/NaiveFilteringProgram.scala | 143 ++++++++------ .../filtering/RevisedFilteringProgram.scala | 215 +++++++++++---------- .../uk/ac/ox/cs/rsacomb/implicits/RDFox.scala | 4 + .../uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala | 53 ++--- .../uk/ac/ox/cs/rsacomb/suffix/RSASuffix.scala | 24 ++- src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala | 70 ++----- 7 files changed, 254 insertions(+), 262 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index 6a3dca2..993e9df 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -545,7 +545,7 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) RDFoxUtil.addFacts( data, RSAOntology.CanonGraph, - (individuals ++ literals) map RSA.Named + (individuals ++ literals) map RSA.Named(RSAOntology.CanonGraph) ) data.evaluateUpdate( null, // the base IRI for the query (if null, a default is used) @@ -569,13 +569,12 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) queries map { query => { - //val graph = RSAOntology.FilterGraph(query) val filter = RSAOntology.filteringProgram(query) + /* Add filtering program */ Logger print s"Filtering 
program rules: ${filter.rules.length}" RDFoxUtil.addRules(data, filter.rules) - - // We remove the rules, should we drop the tuple table as well? + // TODO: We remove the rules, should we drop the tuple table as well? data.clearRulesAxiomsExplicateFacts() /* Gather answers to the query */ diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala index 1777713..6174c9d 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/NaiveFilteringProgram.scala @@ -32,7 +32,7 @@ import tech.oxfordsemantic.jrdfox.logic.expression.{ Variable } import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery -import uk.ac.ox.cs.rsacomb.suffix.{Forward, Backward, Nth} +import uk.ac.ox.cs.rsacomb.suffix.{RSASuffix, Forward, Backward, Nth} import uk.ac.ox.cs.rsacomb.util.{DataFactory, RSA, RDFoxUtil} /** Factory for [[uk.ac.ox.cs.rsacomb.FilteringProgram FilteringProgram]] */ @@ -95,10 +95,41 @@ class NaiveFilteringProgram( /** `TupleTableName`s for the source/targer named graphs */ val tts: TupleTableName = TupleTableName.create(source.getIRI) - implicit val ttt: TupleTableName = TupleTableName.create(target.getIRI) + val ttt: TupleTableName = TupleTableName.create(target.getIRI) /** Set of atoms in the body of the query */ - val queryBody: List[TupleTableAtom] = query.atoms(tts) + private val queryBody: List[TupleTableAtom] = query.atoms(tts) + + /** Helpers */ + private def not(atom: TupleTableAtom): BodyFormula = Negation.create(atom) + + private val QM: TupleTableAtom = + TupleTableAtom.create(ttt, RSA.QM :: query.answer ::: query.bounded) + private def ID(t1: Term, t2: Term) = + TupleTableAtom.create( + ttt, + RSA.ID +: (query.answer ::: query.bounded) :+ t1 :+ t2 + ) + private def NI(term: Term) = + TupleTableAtom.create(ttt, term, IRI.RDF_TYPE, RSA.NI) + private def TQ(sx: RSASuffix, t1: Term, t2: Term) = + 
TupleTableAtom.create( + ttt, + (RSA.TQ :: sx) +: (query.answer ::: query.bounded) :+ t1 :+ t2 + ) + private def AQ(sx: RSASuffix, t1: Term, t2: Term) = + TupleTableAtom.create( + ttt, + (RSA.AQ :: sx) +: (query.answer ::: query.bounded) :+ t1 :+ t2 + ) + private val FK: TupleTableAtom = + TupleTableAtom.create(ttt, RSA.FK :: query.answer ::: query.bounded) + private val SP: TupleTableAtom = + TupleTableAtom.create(ttt, RSA.SP :: query.answer ::: query.bounded) + private def Ans = if (query.bcq) + TupleTableAtom.create(ttt, RSA("blank"), IRI.RDF_TYPE, RSA.ANS) + else + TupleTableAtom.create(ttt, RSA.ANS :: query.answer) /** Rule generating the instances of the predicate `rsa:NI`. * @@ -117,23 +148,20 @@ class NaiveFilteringProgram( */ val nis: Rule = Rule.create( - RSA.NI(varX), - RSA.Congruent(varX, varY)(tts), - RSA.Named(varY)(tts) + NI(varX), + RSA.Congruent(tts)(varX, varY), + RSA.Named(tts)(varY) ) /** Collection of filtering program rules. */ val rules: List[Rule] = nis :: { - /** Negates a [[tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableAtom TupleTableAtom]] */ - def not(atom: TupleTableAtom): BodyFormula = Negation.create(atom) - /** Generates all possible, unfiltered answers. * * @note corresponds to rule 1 in Table 3 in the paper. */ - val r1 = Rule.create(RSA.QM, queryBody: _*) + val r1 = Rule.create(QM, queryBody: _*) /** Initializes instances of `rsa:ID`. * @@ -145,10 +173,10 @@ class NaiveFilteringProgram( */ val r3a = for ((v, i) <- query.bounded.zipWithIndex) - yield Rule.create(RSA.ID(RSA(i), RSA(i)), RSA.QM, not(RSA.NI(v))) - val r3b = Rule.create(RSA.ID(varV, varU), RSA.ID(varU, varV)) + yield Rule.create(ID(RSA(i), RSA(i)), QM, not(NI(v))) + val r3b = Rule.create(ID(varV, varU), ID(varU, varV)) val r3c = - Rule.create(RSA.ID(varU, varW), RSA.ID(varU, varV), RSA.ID(varV, varW)) + Rule.create(ID(varU, varW), ID(varU, varV), ID(varV, varW)) /** Detects forks in the canonical model. 
* @@ -162,10 +190,10 @@ class NaiveFilteringProgram( index2 = query.bounded indexOf (role2.getArguments get 2) if index2 >= 0 } yield Rule.create( - RSA.FK, - RSA.ID(RSA(index1), RSA(index2)), - role1 << Forward, - role2 << Forward, + FK, + ID(RSA(index1), RSA(index2)), + role1 :: Forward, + role2 :: Forward, not( TupleTableAtom.create( tts, @@ -183,10 +211,10 @@ class NaiveFilteringProgram( index2 = query.bounded indexOf (role2.getArguments get 0) if index2 >= 0 } yield Rule.create( - RSA.FK, - RSA.ID(RSA(index1), RSA(index2)), - role1 << Forward, - role2 << Backward, + FK, + ID(RSA(index1), RSA(index2)), + role1 :: Forward, + role2 :: Backward, not( TupleTableAtom.create( tts, @@ -204,10 +232,10 @@ class NaiveFilteringProgram( index2 = query.bounded indexOf (role2.getArguments get 0) if index2 >= 0 } yield Rule.create( - RSA.FK, - RSA.ID(RSA(index1), RSA(index2)), - role1 << Backward, - role2 << Backward, + FK, + ID(RSA(index1), RSA(index2)), + role1 :: Backward, + role2 :: Backward, not( TupleTableAtom.create( tts, @@ -234,18 +262,18 @@ class NaiveFilteringProgram( r2arg2 = role2.getArguments get 2 if query.bounded contains r2arg2 } yield Rule.create( - RSA.ID( + ID( RSA(query.bounded indexOf r1arg0), RSA(query.bounded indexOf r2arg0) ), - RSA.ID( + ID( RSA(query.bounded indexOf r1arg2), RSA(query.bounded indexOf r2arg2) ), TupleTableAtom.create(tts, r1arg0, RSA.CONGRUENT, r2arg0), - role1 << Forward, - role2 << Forward, - not(RSA.NI(r1arg0)) + role1 :: Forward, + role2 :: Forward, + not(NI(r1arg0)) ) val r5b = for { role1 <- queryBody filter (_.isRoleAssertion) @@ -259,18 +287,18 @@ class NaiveFilteringProgram( r2arg2 = role2.getArguments get 2 if query.bounded contains r2arg2 } yield Rule.create( - RSA.ID( + ID( RSA(query.bounded indexOf r1arg0), RSA(query.bounded indexOf r2arg2) ), - RSA.ID( + ID( RSA(query.bounded indexOf r1arg2), RSA(query.bounded indexOf r2arg0) ), TupleTableAtom.create(tts, r1arg0, RSA.CONGRUENT, r2arg2), - role1 << Forward, - role2 << 
Backward, - not(RSA.NI(r1arg0)) + role1 :: Forward, + role2 :: Backward, + not(NI(r1arg0)) ) val r5c = for { role1 <- queryBody filter (_.isRoleAssertion) @@ -284,18 +312,18 @@ class NaiveFilteringProgram( r2arg2 = role2.getArguments get 2 if query.bounded contains r2arg2 } yield Rule.create( - RSA.ID( + ID( RSA(query.bounded indexOf r1arg2), RSA(query.bounded indexOf r2arg2) ), - RSA.ID( + ID( RSA(query.bounded indexOf r1arg0), RSA(query.bounded indexOf r2arg0) ), TupleTableAtom.create(tts, r1arg2, RSA.CONGRUENT, r2arg2), - role1 << Backward, - role2 << Backward, - not(RSA.NI(r1arg2)) + role1 :: Backward, + role2 :: Backward, + not(NI(r1arg2)) ) /** Detect cycles in the canonical model. @@ -314,23 +342,23 @@ class NaiveFilteringProgram( if index2 >= 0 suffix <- Seq(Forward, Backward) } yield Rule.create( - RSA.AQ(suffix, varV, varW), - role << suffix, - RSA.ID(RSA(index0), varV), - RSA.ID(RSA(index2), varW) + AQ(suffix, varV, varW), + role :: suffix, + ID(RSA(index0), varV), + ID(RSA(index2), varW) ) val r7a = for (suffix <- List(Forward, Backward)) yield Rule.create( - RSA.TQ(suffix, varU, varV), - RSA.AQ(suffix, varU, varV) + TQ(suffix, varU, varV), + AQ(suffix, varU, varV) ) val r7b = for (suffix <- List(Forward, Backward)) yield Rule.create( - RSA.TQ(suffix, varU, varW), - RSA.AQ(suffix, varU, varV), - RSA.TQ(suffix, varV, varW) + TQ(suffix, varU, varW), + AQ(suffix, varU, varV), + TQ(suffix, varV, varW) ) /** Flag spurious answers. 
@@ -340,19 +368,14 @@ class NaiveFilteringProgram( val r8a = for (v <- query.answer) yield Rule.create( - RSA.SP, - RSA.QM, - not( - TupleTableAtom.create(tts, v, IRI.RDF_TYPE, RSA.NAMED) - ) + SP, + QM, + not(TupleTableAtom.create(tts, v, IRI.RDF_TYPE, RSA.NAMED)) ) - val r8b = Rule.create(RSA.SP, RSA.FK) + val r8b = Rule.create(SP, FK) val r8c = for (suffix <- List(Forward, Backward)) - yield Rule.create( - RSA.SP, - RSA.TQ(suffix, varV, varV) - ) + yield Rule.create(SP, TQ(suffix, varV, varV)) /** Determine answers to the query * @@ -369,7 +392,7 @@ class NaiveFilteringProgram( * * @note corresponds to rule 9 in Table 3. */ - val r9 = Rule.create(RSA.Ans, RSA.QM, not(RSA.SP)) + val r9 = Rule.create(Ans, QM, not(SP)) (r1 :: r3a ::: r3b :: r3c :: diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala index 5d11369..f059bcd 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala @@ -54,8 +54,12 @@ object RevisedFilteringProgram { * * @param query CQ to be converted into logic rules. */ - def apply(query: ConjunctiveQuery): RevisedFilteringProgram = - new RevisedFilteringProgram(query) + def apply( + source: IRI, + target: IRI, + query: ConjunctiveQuery + ): RevisedFilteringProgram = + new RevisedFilteringProgram(source, target, query) } @@ -66,8 +70,11 @@ object RevisedFilteringProgram { * * Instances can be created using the companion object. 
*/ -class RevisedFilteringProgram(val query: ConjunctiveQuery) - extends FilteringProgram { +class RevisedFilteringProgram( + val source: IRI, + val target: IRI, + val query: ConjunctiveQuery +) extends FilteringProgram { import RDFoxDSL._ @@ -76,45 +83,47 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) */ import uk.ac.ox.cs.rsacomb.implicits.RSAAtom._ + /** Simplify conversion between Java and Scala `List`s */ + import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ + /** Implicit parameter used in RSA internal predicates. * * @see [[uk.ac.ox.cs.rsacomb.util.RSA]] for more information. */ implicit private[this] val _query = query - /** Helpers */ + /** `TupleTableName`s for the source/targer named graphs */ + val tts: TupleTableName = TupleTableName.create(source.getIRI) + val ttt: TupleTableName = TupleTableName.create(target.getIRI) + + /** Set of atoms in the body of the query */ + private val queryBody: List[TupleTableAtom] = query.atoms(tts) - //private def v(name: String): Term = Variable.create(s"${name}i") + /** Helpers */ private def not(atom: TupleTableAtom): BodyFormula = Negation.create(atom) - private def named(x: Term): TupleTableAtom = - TupleTableAtom.rdf(x, IRI.RDF_TYPE, RSA.NAMED) - private def congruent(x: Term, y: Term): TupleTableAtom = - TupleTableAtom.rdf(x, RSA.CONGRUENT, y) - private def skolem(skolem: Term, terms: List[Term]): TupleTableAtom = - TupleTableAtom.create(TupleTableName.SKOLEM, (terms :+ skolem): _*) private def QM(x: Term): TupleTableAtom = - TupleTableAtom.rdf(x, IRI.RDF_TYPE, RSA("QM")) + TupleTableAtom.create(ttt, x, IRI.RDF_TYPE, RSA.QM) private def FK(x: Term): TupleTableAtom = - TupleTableAtom.rdf(x, IRI.RDF_TYPE, RSA("FK")) + TupleTableAtom.create(ttt, x, IRI.RDF_TYPE, RSA.FK) private def SP(x: Term): TupleTableAtom = - TupleTableAtom.rdf(x, IRI.RDF_TYPE, RSA("SP")) + TupleTableAtom.create(ttt, x, IRI.RDF_TYPE, RSA.SP) private def NI(x: Term): TupleTableAtom = - TupleTableAtom.rdf(x, IRI.RDF_TYPE, 
RSA("NI")) + TupleTableAtom.create(ttt, x, IRI.RDF_TYPE, RSA.NI) private def Ans(x: Term): TupleTableAtom = - TupleTableAtom.rdf(x, IRI.RDF_TYPE, RSA("Ans")) + TupleTableAtom.create(ttt, x, IRI.RDF_TYPE, RSA.ANS) private def ID(x: Term, y: Term): TupleTableAtom = - TupleTableAtom.rdf(x, RSA("ID"), y) - private def AQ(suffix: RSASuffix, x: Term, y: Term): TupleTableAtom = - TupleTableAtom.rdf(x, RSA("AQ"), y) << suffix - private def TQ(suffix: RSASuffix, x: Term, y: Term): TupleTableAtom = - TupleTableAtom.rdf(x, RSA("TQ"), y) << suffix + TupleTableAtom.create(ttt, x, RSA.ID, y) + private def AQ(suffix: RSASuffix)(x: Term, y: Term): TupleTableAtom = + TupleTableAtom.create(ttt, x, RSA.AQ :: suffix, y) + private def TQ(suffix: RSASuffix)(x: Term, y: Term): TupleTableAtom = + TupleTableAtom.create(ttt, x, RSA.TQ :: suffix, y) /** Rule generating the instances of the predicate `rsa:NI`. * * According to the original paper, the set of `rsa:NI` is defined as * the set of constants that are equal (w.r.t. the congruence - * relation represented by `rsa:Congruent`) to a constant in the + * relation represented by `rsacomb:Congruent`) to a constant in the * original ontology. * * @note that the set of `rsa:Named` constants is always a subset of @@ -125,7 +134,8 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) * predicate, this is not feasible, and the instances are instead * generate in the filtering program using a logic rule. */ - val nis: Rule = Rule.create(NI(v"X"), named(v"Y"), congruent(v"X", v"Y")) + val nis: Rule = + Rule.create(NI(v"X"), RSA.Named(tts)(v"Y"), RSA.Congruent(tts)(v"X", v"Y")) /** Collection of filtering program rules. */ val rules: List[Rule] = @@ -138,7 +148,7 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) * @note corresponds to rule 1 in Table 3 in the paper. 
*/ val r1 = - Rule.create(QM(v"K"), (query.atoms :+ skolem(v"K", variables)): _*) + Rule.create(QM(v"K"), queryBody :+ RSA.Skolem(v"K", variables)) /** Initializes instances of `rsa:ID`. * @@ -153,26 +163,26 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) yield Rule.create( ID(v"K", v"S"), QM(v"K"), - skolem(v"K", variables), + RSA.Skolem(v"K", variables), not(NI(v)), - skolem(v"S", variables :+ RSA(i) :+ RSA(i)) + RSA.Skolem(v"S", variables :+ RSA(i) :+ RSA(i)) ) val r3b = Rule.create( ID(v"K", v"T"), ID(v"K", v"S"), - skolem(v"S", variables :+ v"U" :+ v"V"), - skolem(v"T", variables :+ v"V" :+ v"U") + RSA.Skolem(v"S", variables :+ v"U" :+ v"V"), + RSA.Skolem(v"T", variables :+ v"V" :+ v"U") ) val r3c = Rule.create( ID(v"K1", v"Q"), QM(v"K1"), ID(v"K2", v"S"), FilterAtom.create(FunctionCall.equal(v"K1", v"K2")), - skolem(v"S", variables :+ v"U" :+ v"V"), + RSA.Skolem(v"S", variables :+ v"U" :+ v"V"), ID(v"K3", v"T"), FilterAtom.create(FunctionCall.equal(v"K1", v"K3")), - skolem(v"T", variables :+ v"V" :+ v"W"), - skolem(v"Q", variables :+ v"U" :+ v"W") + RSA.Skolem(v"T", variables :+ v"V" :+ v"W"), + RSA.Skolem(v"Q", variables :+ v"U" :+ v"W") ) /** Detects forks in the canonical model. @@ -180,49 +190,55 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) * @note corresponds to rules 4x in Table 3. 
*/ val r4a = for { - role1 <- query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) index1 = query.bounded indexOf (role1.getArguments get 2) if index1 >= 0 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) index2 = query.bounded indexOf (role2.getArguments get 2) if index2 >= 0 } yield Rule.create( FK(v"K"), ID(v"K", v"S"), - skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)), - role1 << Forward, - role2 << Forward, - not(RSA.Congruent(role1.getArguments get 0, role2.getArguments get 0)) + RSA.Skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)), + role1 :: Forward, + role2 :: Forward, + not( + RSA.Congruent(tts)(role1.getArguments get 0, role2.getArguments get 0) + ) ) val r4b = for { - role1 <- query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) index1 = query.bounded indexOf (role1.getArguments get 2) if index1 >= 0 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) index2 = query.bounded indexOf (role2.getArguments get 0) if index2 >= 0 } yield Rule.create( FK(v"K"), ID(v"K", v"S"), - skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)), - role1 << Forward, - role2 << Backward, - not(RSA.Congruent(role1.getArguments get 0, role2.getArguments get 2)) + RSA.Skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)), + role1 :: Forward, + role2 :: Backward, + not( + RSA.Congruent(tts)(role1.getArguments get 0, role2.getArguments get 2) + ) ) val r4c = for { - role1 <- query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) index1 = query.bounded indexOf (role1.getArguments get 0) if index1 >= 0 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) index2 = query.bounded indexOf (role2.getArguments get 0) if index2 >= 0 } yield Rule.create( FK(v"K"), ID(v"K", v"S"), - skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)), - role1 
<< Backward, - role2 << Backward, - not(RSA.Congruent(role1.getArguments get 2, role2.getArguments get 2)) + RSA.Skolem(v"S", variables :+ RSA(index1) :+ RSA(index2)), + role1 :: Backward, + role2 :: Backward, + not( + RSA.Congruent(tts)(role1.getArguments get 2, role2.getArguments get 2) + ) ) /** Recursively propagates `rsa:ID` predicate. @@ -230,12 +246,12 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) * @note corresponds to rules 5x in Table 3. */ val r5a = for { - role1 <- query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) r1arg0 = role1.getArguments get 0 if query.bounded contains r1arg0 r1arg2 = role1.getArguments get 2 if query.bounded contains r1arg2 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) r2arg0 = role2.getArguments get 0 if query.bounded contains r2arg0 r2arg2 = role2.getArguments get 2 @@ -243,17 +259,17 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) } yield Rule.create( ID(v"K", v"T"), ID(v"K", v"S"), - skolem( + RSA.Skolem( v"S", variables :+ RSA(query.bounded indexOf r1arg2) :+ RSA(query.bounded indexOf r2arg2) ), - RSA.Congruent(r1arg0, r2arg0), - role1 << Forward, - role2 << Forward, + RSA.Congruent(tts)(r1arg0, r2arg0), + role1 :: Forward, + role2 :: Forward, not(NI(r1arg0)), - skolem( + RSA.Skolem( v"T", variables :+ RSA(query.bounded indexOf r1arg0) :+ @@ -261,12 +277,12 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) ) ) val r5b = for { - role1 <- query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) r1arg0 = role1.getArguments get 0 if query.bounded contains r1arg0 r1arg2 = role1.getArguments get 2 if query.bounded contains r1arg2 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) r2arg0 = role2.getArguments get 0 if query.bounded contains r2arg0 r2arg2 = role2.getArguments get 2 @@ -274,17 +290,17 @@ class 
RevisedFilteringProgram(val query: ConjunctiveQuery) } yield Rule.create( ID(v"K", v"T"), ID(v"K", v"S"), - skolem( + RSA.Skolem( v"S", variables :+ RSA(query.bounded indexOf r1arg2) :+ RSA(query.bounded indexOf r2arg0) ), - RSA.Congruent(r1arg0, r2arg2), - role1 << Forward, - role2 << Backward, - not(RSA.NI(r1arg0)), - skolem( + RSA.Congruent(tts)(r1arg0, r2arg2), + role1 :: Forward, + role2 :: Backward, + not(NI(r1arg0)), + RSA.Skolem( v"T", variables :+ RSA(query.bounded indexOf r1arg0) :+ @@ -292,12 +308,12 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) ) ) val r5c = for { - role1 <- query.atoms filter (_.isRoleAssertion) + role1 <- queryBody filter (_.isRoleAssertion) r1arg0 = role1.getArguments get 0 if query.bounded contains r1arg0 r1arg2 = role1.getArguments get 2 if query.bounded contains r1arg2 - role2 <- query.atoms filter (_.isRoleAssertion) + role2 <- queryBody filter (_.isRoleAssertion) r2arg0 = role2.getArguments get 0 if query.bounded contains r2arg0 r2arg2 = role2.getArguments get 2 @@ -305,17 +321,17 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) } yield Rule.create( ID(v"K", v"T"), ID(v"K", v"S"), - skolem( + RSA.Skolem( v"S", variables :+ RSA(query.bounded indexOf r1arg0) :+ RSA(query.bounded indexOf r2arg0) ), - RSA.Congruent(r1arg2, r2arg2), - role1 << Backward, - role2 << Backward, - not(RSA.NI(r1arg2)), - skolem( + RSA.Congruent(tts)(r1arg2, r2arg2), + role1 :: Backward, + role2 :: Backward, + not(NI(r1arg2)), + RSA.Skolem( v"T", variables :+ RSA(query.bounded indexOf r1arg2) :+ @@ -332,38 +348,38 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) * @note corresponds to rules 6,7x in Table 3. 
*/ val r6 = for { - role <- query.atoms filter (_.isRoleAssertion) + role <- queryBody filter (_.isRoleAssertion) index0 = query.bounded indexOf (role.getArguments get 0) if index0 >= 0 index2 = query.bounded indexOf (role.getArguments get 2) if index2 >= 0 suffix <- Seq(Forward, Backward) } yield Rule.create( - AQ(suffix, v"K1", v"Q"), + AQ(suffix)(v"K1", v"Q"), ID(v"K1", v"S"), - skolem(v"S", variables :+ RSA(index0) :+ v"V"), + RSA.Skolem(v"S", variables :+ RSA(index0) :+ v"V"), ID(v"K2", v"T"), FilterAtom.create(FunctionCall.equal(v"K1", v"K2")), - skolem(v"T", variables :+ RSA(index2) :+ v"W"), - role << suffix, - skolem(v"Q", variables :+ v"V" :+ v"W") + RSA.Skolem(v"T", variables :+ RSA(index2) :+ v"W"), + role :: suffix, + RSA.Skolem(v"Q", variables :+ v"V" :+ v"W") ) val r7a = for (suffix <- List(Forward, Backward)) yield Rule.create( - TQ(suffix, v"K", v"S"), - AQ(suffix, v"K", v"S") + TQ(suffix)(v"K", v"S"), + AQ(suffix)(v"K", v"S") ) val r7b = for (suffix <- List(Forward, Backward)) yield Rule.create( - TQ(suffix, v"K1", v"Q"), - AQ(suffix, v"K1", v"S"), - skolem(v"S", variables :+ v"U" :+ v"V"), - TQ(suffix, v"K2", v"T"), + TQ(suffix)(v"K1", v"Q"), + AQ(suffix)(v"K1", v"S"), + RSA.Skolem(v"S", variables :+ v"U" :+ v"V"), + TQ(suffix)(v"K2", v"T"), FilterAtom.create(FunctionCall.equal(v"K1", v"K2")), - skolem(v"T", variables :+ v"V" :+ v"W"), - skolem(v"Q", variables :+ v"U" :+ v"W") + RSA.Skolem(v"T", variables :+ v"V" :+ v"W"), + RSA.Skolem(v"Q", variables :+ v"U" :+ v"W") ) /** Flag spurious answers. 
@@ -375,19 +391,16 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) yield Rule.create( SP(v"K"), QM(v"K"), - skolem(v"K", variables), - not(RSA.Named(v)) + RSA.Skolem(v"K", variables), + not(RSA.Named(tts)(v)) ) - val r8b = Rule.create( - SP(v"K"), - FK(v"K") - ) + val r8b = Rule.create(SP(v"K"), FK(v"K")) val r8c = for (suffix <- List(Forward, Backward)) yield Rule.create( SP(v"K"), - TQ(suffix, v"K", v"S"), - skolem(v"S", variables :+ v"V" :+ v"V") + TQ(suffix)(v"K", v"S"), + RSA.Skolem(v"S", variables :+ v"V" :+ v"V") ) /** Determine answers to the query @@ -405,11 +418,7 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) * * @note corresponds to rule 9 in Table 3. */ - val r9 = Rule.create( - Ans(v"K"), - QM(v"K"), - not(SP(v"K")) - ) + val r9 = Rule.create(Ans(v"K"), QM(v"K"), not(SP(v"K"))) (r1 :: r3a ::: r3b :: r3c :: r4a ::: r4b ::: r4c ::: r5a ::: r5b ::: r5c ::: r6 ::: r7b ::: r7a ::: r8a ::: r8b :: r8c ::: r9 :: List()) } @@ -422,12 +431,12 @@ class RevisedFilteringProgram(val query: ConjunctiveQuery) s""" SELECT $answer WHERE { - GRAPH <$graph> { ?K a rsa:Ans } . - TT { $answer $bounded ?K } . + GRAPH $target { ?K a ${RSA.ANS} } . + TT ${TupleTableName.SKOLEM} { $answer $bounded ?K } . 
} """ } else { - s"ASK { GRAPH <$graph> { ?X a rsa:Ans } }" + s"ASK { GRAPH $target { ?X a ${RSA.ANS} } }" } } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RDFox.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RDFox.scala index d4b7876..ca77409 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RDFox.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RDFox.scala @@ -17,6 +17,7 @@ package uk.ac.ox.cs.rsacomb.implicits import tech.oxfordsemantic.jrdfox.logic.Datatype +import tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableName import tech.oxfordsemantic.jrdfox.logic.expression.{ BlankNode, IRI => RDFoxIRI, @@ -47,6 +48,9 @@ object RDFox { implicit def stringToRdfoxIri(iri: String): RDFoxIRI = RDFoxIRI.create(iri) + implicit def iriToTupleTableName(iri: RDFoxIRI): TupleTableName = + TupleTableName.create(iri.getIRI) + /** Converst an OWLAPI datatype into an RDFox datatype. * * The builtin datatypes defined by the two systems do not match diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala index 37c70df..89777c4 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/implicits/RSAAtom.scala @@ -29,21 +29,6 @@ import uk.ac.ox.cs.rsacomb.RSAOntology import uk.ac.ox.cs.rsacomb.suffix.{RSASuffix, Nth} import uk.ac.ox.cs.rsacomb.util.{DataFactory, RDFoxUtil} -/* Is this the best way to determine if an atom is an RDF triple? - * Note that we can't use `getNumberOfArguments()` because is not - * "consistent": - * - for an atom created with `rdf(, , )`, - * `getNumberOfArguments` returns 3 - * - for an atom created with `Atom.create(, , - * , )`, `getNumberOfArguments()` returns 3 - * - * This is probably because `Atom.rdf(...) 
is implemented as: - * ```scala - * def rdf(term1: Term, term2: Term, term3: Term): Atom = - * Atom.create(TupleTableName.create("rdfox:DefaultTriples"), term1, term2, term3) - * ``` - */ - object RSAAtom { implicit class RSAAtom(val atom: TupleTableAtom) { @@ -62,25 +47,25 @@ object RSAAtom { val isRoleAssertion: Boolean = isRDF && !isClassAssertion - def <<(suffix: RSASuffix): TupleTableAtom = - if (isRDF) { - val subj = atom.getArguments.get(0) - val pred = atom.getArguments.get(1) - val obj = atom.getArguments.get(2) - if (isClassAssertion) { - val obj1 = obj match { - case iri: IRI => IRI.create(iri.getIRI :: suffix) - case other => other - } - TupleTableAtom.create(tt, subj, pred, obj1) - } else { - val pred1 = pred match { - case iri: IRI => IRI.create(iri.getIRI :: suffix) - case other => other - } - TupleTableAtom.create(tt, subj, pred1, obj) - } - } else atom + // def <<(suffix: RSASuffix): TupleTableAtom = + // if (isRDF) { + // val subj = atom.getArguments.get(0) + // val pred = atom.getArguments.get(1) + // val obj = atom.getArguments.get(2) + // if (isClassAssertion) { + // val obj1 = obj match { + // case iri: IRI => IRI.create(iri.getIRI :: suffix) + // case other => other + // } + // TupleTableAtom.create(tt, subj, pred, obj1) + // } else { + // val pred1 = pred match { + // case iri: IRI => IRI.create(iri.getIRI :: suffix) + // case other => other + // } + // TupleTableAtom.create(tt, subj, pred1, obj) + // } + // } else atom // def reified(implicit // fresh: DataFactory diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/suffix/RSASuffix.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/suffix/RSASuffix.scala index 424f2a0..282aa0b 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/suffix/RSASuffix.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/suffix/RSASuffix.scala @@ -16,13 +16,13 @@ package uk.ac.ox.cs.rsacomb.suffix -import org.semanticweb.owlapi.model.{ - OWLPropertyExpression, - OWLObjectInverseOf, - OWLObjectProperty -} +// import 
org.semanticweb.owlapi.model.{ +// OWLPropertyExpression, +// OWLObjectInverseOf, +// OWLObjectProperty +// } -import tech.oxfordsemantic.jrdfox.logic.expression.{IRI} +import tech.oxfordsemantic.jrdfox.logic.expression.{IRI, Term} import tech.oxfordsemantic.jrdfox.logic.datalog.{TupleTableAtom, TupleTableName} object RSASuffix { @@ -37,7 +37,17 @@ class RSASuffix(val suffix: String => String) { new RSASuffix(this.suffix andThen that.suffix) def ::(str: String): String = this suffix str - + def ::(iri: IRI): IRI = IRI.create(this suffix iri.getIRI) + def ::(tta: TupleTableAtom): TupleTableAtom = { + val ttn: TupleTableName = tta.getTupleTableName + tta.getArguments match { + case List(subj: Term, IRI.RDF_TYPE, obj: IRI) => + TupleTableAtom.create(ttn, subj, IRI.RDF_TYPE, obj :: this) + case List(subj: Term, pred: IRI, obj: Term) => + TupleTableAtom.create(ttn, subj, pred :: this, obj) + case _ => tta + } + } } case object Empty extends RSASuffix(identity) diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala index 40c5ced..5abb83c 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala @@ -54,73 +54,35 @@ object RSA { /** Creates a `rsacomb:` IRI */ def apply(name: Any): IRI = IRI.create( + //Prefixes.decodeIRI("rsacomb:") + name.toString Prefixes.getPrefixIRIsByPrefixName.get("rsacomb:").getIRI + name.toString ) + /** Helper IRIs */ val ANS = RSA("Ans") + val AQ = RSA("AQ") val CONGRUENT = RSA("congruent") + val FK = RSA("FK") + val ID = RSA("ID") val IN = RSA("In") val NAMED = RSA("Named") + val NI = RSA("NI") + val QM = RSA("QM") + val SP = RSA("SP") + val TQ = RSA("TQ") + + def Named(tt: TupleTableName)(x: Term): TupleTableAtom = + TupleTableAtom.create(tt, x, IRI.RDF_TYPE, RSA.NAMED) + def Congruent(tt: TupleTableName)(x: Term, y: Term): TupleTableAtom = + TupleTableAtom.create(tt, x, RSA.CONGRUENT, y) + def Skolem(skolem: Term, terms: 
List[Term]): TupleTableAtom = + TupleTableAtom.create(TupleTableName.SKOLEM, terms :+ skolem) // def In(t: Term)(implicit set: Term) = // TupleTableAtom.rdf(t, RSA("In"), set) // def NotIn(t: Term)(implicit set: Term) = Negation.create(In(t)(set)) - def Congruent(t1: Term, t2: Term)(implicit graph: TupleTableName) = - TupleTableAtom.create(graph, t1, RSA.CONGRUENT, t2) - - def Named(term: Term)(implicit graph: TupleTableName) = - TupleTableAtom.create(graph, term, IRI.RDF_TYPE, RSA.NAMED) - - def QM(implicit query: ConjunctiveQuery, graph: TupleTableName) = - TupleTableAtom.create(graph, RSA("QM") :: query.answer ::: query.bounded) - - def ID(t1: Term, t2: Term)(implicit - query: ConjunctiveQuery, - graph: TupleTableName - ) = - TupleTableAtom.create( - graph, - RSA("ID") +: (query.answer ::: query.bounded) :+ t1 :+ t2 - ) - - // def Thing(t: Term) = - // TupleTableAtom.rdf(t, IRI.RDF_TYPE, IRI.THING) - - def NI(term: Term)(implicit graph: TupleTableName) = - TupleTableAtom.create(graph, term, IRI.RDF_TYPE, RSA("NI")) - - def TQ(sx: RSASuffix, t1: Term, t2: Term)(implicit - query: ConjunctiveQuery, - graph: TupleTableName - ) = - TupleTableAtom.create( - graph, - RSA("TQ" :: sx) +: (query.answer ::: query.bounded) :+ t1 :+ t2 - ) - - def AQ(sx: RSASuffix, t1: Term, t2: Term)(implicit - query: ConjunctiveQuery, - graph: TupleTableName - ) = - TupleTableAtom.create( - graph, - RSA("AQ" :: sx) +: (query.answer ::: query.bounded) :+ t1 :+ t2 - ) - - def FK(implicit query: ConjunctiveQuery, graph: TupleTableName) = - TupleTableAtom.create(graph, RSA("FK") :: query.answer ::: query.bounded) - - def SP(implicit q: ConjunctiveQuery, graph: TupleTableName) = - TupleTableAtom.create(graph, RSA("SP") :: q.answer ::: q.bounded) - - def Ans(implicit q: ConjunctiveQuery, graph: TupleTableName) = - if (q.bcq) - TupleTableAtom.create(graph, RSA("blank"), IRI.RDF_TYPE, RSA.ANS) - else - TupleTableAtom.create(graph, RSA.ANS :: q.answer) - /* TODO: review after reworking the 
dependency graph construction */ // private def atom(name: IRI, vars: List[Term]): TupleTableAtom = -- cgit v1.2.3 From 19fcf57f84a04599062b0751cf781dd073ae360d Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Sat, 2 Oct 2021 10:42:15 +0100 Subject: Expose logger level to CLI --- src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala | 27 ++++++++++++++++++---- .../scala/uk/ac/ox/cs/rsacomb/util/Logger.scala | 2 +- 2 files changed, 24 insertions(+), 5 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala index c030301..22b1f76 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala @@ -19,6 +19,7 @@ package uk.ac.ox.cs.rsacomb import java.io.{File, PrintWriter} import java.nio.file.{Path, Paths, InvalidPathException} import java.util.HashMap +import scala.collection.mutable.Map import scala.collection.JavaConverters._ import tech.oxfordsemantic.jrdfox.client.UpdateType import tech.oxfordsemantic.jrdfox.logic.expression.{IRI, Term} @@ -50,7 +51,15 @@ object RSAConfig { -h | -? | --help print this help message - -q | --queries + -l | --logger + specify the logger verbosity. Values are: quiet, normal (default), + debug, verbose. + + -o | --output + path to the output file for the answers to the query (in JSON + format) + + -q | --queries path to a file containing a single SPARQL query. If no query is provided, only the approximation to RSA will be performed. 
@@ -98,10 +107,19 @@ object RSAConfig { println(help) sys.exit(0) } + case flag @ ("-l" | "--logger") :: _level :: tail => { + val level = _level match { + case "quiet" => Logger.QUIET + case "debug" => Logger.DEBUG + case "verbose" => Logger.VERBOSE + case _ => Logger.NORMAL + } + parse(tail, config += ('logger -> level)) + } case flag @ ("-o" | "--output") :: _output :: tail => try { val output = Paths.get(_output) - parse(tail, config ++ Map('output -> output)) + parse(tail, config += ('output -> output)) } catch { case e: InvalidPathException => exit(s"'${_output}' is not a valid filename.") @@ -110,7 +128,7 @@ object RSAConfig { val query = new File(_query) if (!query.isFile) exit(s"'$query' is not a valid filename.") - parse(tail, config ++ Map('queries -> query)) + parse(tail, config += ('queries -> query)) } case _ontology :: _data => { val ontology = new File(_ontology) @@ -119,7 +137,7 @@ object RSAConfig { if (!file.isFile) exit(s"'$file' is not a valid filename.") } - finalise(config ++ Map('ontology -> ontology, 'data -> data)) + finalise(config += ('ontology -> ontology) += ('data -> data)) } case a => exit(s"Invalid sequence of arguments '${a.mkString(" ")}'.") } @@ -134,6 +152,7 @@ object RSAComb extends App { /* Command-line options */ val config = RSAConfig.parse(args.toList) + Logger.level = config('logger).get[Logger.Level] /* Load original ontology and normalize it */ val ontology = Ontology( diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala index bcb1445..a09fcea 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala @@ -34,7 +34,7 @@ object Logger { def compare(that: Level) = this.level - that.level override def toString = name } - case object QUIET extends Level(0, "normal") + case object QUIET extends Level(0, "quiet") case object NORMAL extends Level(1, "normal") case object DEBUG extends Level(2, 
"debug") case object VERBOSE extends Level(3, "verbose") -- cgit v1.2.3 From d0868f102ac29a04461d9aa68230c0d4bb663426 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Sat, 2 Oct 2021 12:10:22 +0100 Subject: Allow querying of a single query --- .../scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 54 ++++++++++++++-------- 1 file changed, 36 insertions(+), 18 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index 993e9df..21e0506 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -527,12 +527,22 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) def unfold(axiom: OWLSubClassOfAxiom): Set[Term] = this.self(axiom) | this.cycle(axiom) + /** Returns the answers to a single query + * + * @param queries a sequence of conjunctive queries to answer. + * @return a collection of answers for each query. + */ + def ask(query: ConjunctiveQuery): ConjunctiveQueryAnswers = this._ask(query) + /** Returns the answers to a collection of queries * * @param queries a sequence of conjunctive queries to answer. * @return a collection of answers for each query. 
*/ - def ask(queries: Seq[ConjunctiveQuery]): Seq[ConjunctiveQueryAnswers] = { + def ask(queries: Seq[ConjunctiveQuery]): Seq[ConjunctiveQueryAnswers] = + queries map _ask + + private lazy val _ask: ConjunctiveQuery => ConjunctiveQueryAnswers = { /* Open connection with RDFox server */ val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore) @@ -567,23 +577,31 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) Logger print s"Canonical model facts: ${this.canonicalModel.facts.length}" RDFoxUtil.addFacts(data, RSAOntology.CanonGraph, this.canonicalModel.facts) - queries map { query => - { - val filter = RSAOntology.filteringProgram(query) - - /* Add filtering program */ - Logger print s"Filtering program rules: ${filter.rules.length}" - RDFoxUtil.addRules(data, filter.rules) - // TODO: We remove the rules, should we drop the tuple table as well? - data.clearRulesAxiomsExplicateFacts() - - /* Gather answers to the query */ - RDFoxUtil - .submitQuery(data, filter.answerQuery, RSA.Prefixes) - .map(new ConjunctiveQueryAnswers(query, query.variables, _)) - .get - } - } + /* Close connection with RDFox server */ + RDFoxUtil.closeConnection(server, data) + + (query => { + /* Open connection with RDFox server */ + val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore) + val filter = RSAOntology.filteringProgram(query) + + /* Add filtering program */ + Logger print s"Filtering program rules: ${filter.rules.length}" + RDFoxUtil.addRules(data, filter.rules) + // TODO: We remove the rules, should we drop the tuple table as well? 
+ data.clearRulesAxiomsExplicateFacts() + + /* Gather answers to the query */ + val answers = RDFoxUtil + .submitQuery(data, filter.answerQuery, RSA.Prefixes) + .map(new ConjunctiveQueryAnswers(query, query.variables, _)) + .get + + /* Close connection with RDFox server */ + RDFoxUtil.closeConnection(server, data) + + answers + }) } //def ask(query: ConjunctiveQuery): ConjunctiveQueryAnswers = Logger.timed( -- cgit v1.2.3 From 44b8e1c8c724bf7f62f2b567548b941f88a31dc6 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Sun, 3 Oct 2021 10:21:10 +0100 Subject: Fix compilation errors --- src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 5 ++--- .../ac/ox/cs/rsacomb/filtering/FilteringProgram.scala | 12 ++++-------- .../scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala | 14 ++++++++++---- .../uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala | 19 +++++++++++-------- 4 files changed, 27 insertions(+), 23 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index 21e0506..275f523 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -114,9 +114,8 @@ object RSAOntology { def filteringProgram(query: ConjunctiveQuery): FilteringProgram = Logger.timed( { - val filter = - FilteringProgram(FilterType.REVISED, CanonGraph, FilterGraph(query)) - filter(query) + val filter = FilteringProgram(FilterType.REVISED) + filter(CanonGraph, FilterGraph(query), query) }, "Generating filtering program", Logger.DEBUG diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala index 3015def..075954e 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/FilteringProgram.scala @@ -36,7 +36,7 @@ object FilteringProgram extends 
Versioned[FilterType] { import FilterType._ - type Result = (ConjunctiveQuery) => FilteringProgram + type Result = (IRI, IRI, ConjunctiveQuery) => FilteringProgram /** Returns the right type of filtering program builder. * @@ -46,14 +46,10 @@ object FilteringProgram extends Versioned[FilterType] { * * @return the right type of filtering program builder. */ - def apply( - filter: FilterType, - source: IRI, - target: IRI - ): (ConjunctiveQuery) => FilteringProgram = + def apply(filter: FilterType): Result = filter match { - case NAIVE => NaiveFilteringProgram(source, target, _) - case REVISED => RevisedFilteringProgram(source, target, _) + case NAIVE => NaiveFilteringProgram(_, _, _) + case REVISED => RevisedFilteringProgram(_, _, _) } } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala index 1f44ce1..8d46646 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala @@ -29,8 +29,13 @@ import org.semanticweb.owlapi.apibinding.OWLManager import org.semanticweb.owlapi.model.{OWLOntology, OWLAxiom, OWLLogicalAxiom} import org.semanticweb.owlapi.model.{OWLObjectPropertyExpression} import org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory -import tech.oxfordsemantic.jrdfox.logic.datalog.Rule -import tech.oxfordsemantic.jrdfox.logic.expression.{Resource, Term, Variable} +import tech.oxfordsemantic.jrdfox.logic.datalog.{Rule, TupleTableName} +import tech.oxfordsemantic.jrdfox.logic.expression.{ + IRI, + Resource, + Term, + Variable +} import uk.ac.ox.cs.rsacomb.approximation.Approximation import uk.ac.ox.cs.rsacomb.converter._ @@ -143,10 +148,11 @@ object Ontology { RSA.U(varY) ) :: rules /* Load facts and rules from ontology */ - RDFoxUtil.addFacts(data, facts) + val ttn = IRI.create(TupleTableName.DEFAULT_TRIPLES.getName) + RDFoxUtil.addFacts(data, ttn, facts) RDFoxUtil.addRules(data, rules) /* 
Load data files */ - RDFoxUtil.addData(data, datafiles: _*) + RDFoxUtil.addData(data, ttn, datafiles: _*) /* Build the graph */ val query = "SELECT ?X ?Y WHERE { ?X rsa:E ?Y }" diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala index 73da80f..105f425 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala @@ -100,12 +100,12 @@ class ConjunctiveQuery( /** Returns the query body as a sequence of atoms (triples). */ def atoms(graph: TupleTableName): List[TupleTableAtom] = - where.collect { case c: ConjunctionPattern => - c.getConjuncts.collect { case t: TriplePattern => - TupleTableAtom - .create(graph, t.getSubject, t.getPredicate, t.getObject) + where + .asInstanceOf[ConjunctionPattern] + .getConjuncts + .collect { case t: TriplePattern => + TupleTableAtom.create(graph, t.getSubject, t.getPredicate, t.getObject) } - }.flatten // where match { // case b: ConjunctionPattern => { // b.getConjuncts.toList.flatMap { conj: QueryPattern => @@ -123,13 +123,16 @@ class ConjunctiveQuery( /** Returns the full collection of variables involved in the query. 
*/ val variables: List[Variable] = - where.collect { case c: ConjunctionPattern => - c.getConjuncts.collect { case t: TriplePattern => + where + .asInstanceOf[ConjunctionPattern] + .getConjuncts + .collect { case t: TriplePattern => Set(t.getSubject, t.getPredicate, t.getObject).collect { case v: Variable => v } } - }.distinct + .flatten + .distinct // (where match { // case b: ConjunctionPattern => { // b.getConjuncts.toList.flatMap { conj: QueryPattern => -- cgit v1.2.3 From 2aa8094df2eb9fde48c8073fbdbb2ebcc42fdbf0 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Sun, 3 Oct 2021 10:22:34 +0100 Subject: Move to os-lib for filesystem operations --- src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala | 50 ++++++++++++---------- .../scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 4 +- .../uk/ac/ox/cs/rsacomb/ontology/Ontology.scala | 15 ++++--- .../scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala | 20 +++++---- 4 files changed, 51 insertions(+), 38 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala index 22b1f76..24bda1f 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala @@ -88,6 +88,14 @@ object RSAConfig { sys.exit(1) } + private def getPath(str: String): os.Path = + try { + os.Path(str, base = os.pwd) + } catch { + case e: IllegalArgumentException => + exit(s"'$str' is not a well formed path.") + } + /** Parse arguments with default options * * @param args arguments list @@ -116,26 +124,20 @@ object RSAConfig { } parse(tail, config += ('logger -> level)) } - case flag @ ("-o" | "--output") :: _output :: tail => - try { - val output = Paths.get(_output) - parse(tail, config += ('output -> output)) - } catch { - case e: InvalidPathException => - exit(s"'${_output}' is not a valid filename.") - } + case flag @ ("-o" | "--output") :: output :: tail => + parse(tail, config += ('output -> getPath(output))) case flag @ 
("-q" | "--queries") :: _query :: tail => { - val query = new File(_query) - if (!query.isFile) - exit(s"'$query' is not a valid filename.") + val query = getPath(_query) + if (!os.isFile(query)) + exit(s"'${_query}' is not a valid filename.") parse(tail, config += ('queries -> query)) } case _ontology :: _data => { - val ontology = new File(_ontology) - val data = _data.map(new File(_)) - (ontology :: data) foreach { (file) => - if (!file.isFile) - exit(s"'$file' is not a valid filename.") + val ontology = getPath(_ontology) + val data = _data map getPath + (ontology :: data) foreach { (path) => + if (!os.isFile(path)) + exit(s"'$path' is not a valid filename.") } finalise(config += ('ontology -> ontology) += ('data -> data)) } @@ -156,8 +158,8 @@ object RSAComb extends App { /* Load original ontology and normalize it */ val ontology = Ontology( - config('ontology).get[File], - config('data).get[List[File]] + config('ontology).get[os.Path], + config('data).get[List[os.Path]] ).normalize(new Normalizer) //ontology.axioms foreach println @@ -168,14 +170,18 @@ object RSAComb extends App { if (config contains 'queries) { val queries = - RDFoxUtil.loadQueriesFromFile(config('queries).get[File].getAbsoluteFile) + RDFoxUtil.loadQueriesFromFile( + config('queries).get[os.Path] + ) val answers = rsa ask queries /* Write answers to output file */ - val output = new PrintWriter(config('output).get[Path].toFile) - output.write(ujson.write(ujson.Arr(answers.map(_.toJSON)), indent = 4)) - output.close() + os.write( + config('output).get[os.Path], + ujson.write(ujson.Arr(answers.map(_.toJSON)), indent = 4), + createFolders = true + ) // Logger.print(s"$answers", Logger.VERBOSE) // Logger print s"Number of answers: ${answers.length} (${answers.lengthWithMultiplicity})" diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index 275f523..e2b0e2f 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ 
b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -123,7 +123,7 @@ object RSAOntology { def apply( axioms: List[OWLLogicalAxiom], - datafiles: List[File] + datafiles: List[os.Path] ): RSAOntology = new RSAOntology(axioms, datafiles) // def apply( @@ -191,7 +191,7 @@ object RSAOntology { * @param ontology the input OWL2 ontology. * @param datafiles additinal data (treated as part of the ABox) */ -class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[File]) +class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) extends Ontology(axioms, datafiles) { /** Simplify conversion between OWLAPI and RDFox concepts */ diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala index 8d46646..9d80dd5 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala @@ -77,7 +77,7 @@ object Ontology { */ def dependencyGraph( axioms: List[OWLLogicalAxiom], - datafiles: List[File], + datafiles: List[os.Path], unsafe: List[OWLObjectPropertyExpression] ): DependencyGraph = { @@ -167,10 +167,10 @@ object Ontology { (graph, nodemap) } - def apply(axioms: List[OWLLogicalAxiom], datafiles: List[File]): Ontology = + def apply(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]): Ontology = new Ontology(axioms, datafiles) - def apply(ontology: OWLOntology, datafiles: List[File]): Ontology = { + def apply(ontology: OWLOntology, datafiles: List[os.Path]): Ontology = { /** TBox axioms */ var tbox: List[OWLLogicalAxiom] = @@ -202,8 +202,8 @@ object Ontology { Ontology(abox ::: tbox ::: rbox, datafiles) } - def apply(ontofile: File, datafiles: List[File]): Ontology = { - val ontology = manager.loadOntologyFromOntologyDocument(ontofile) + def apply(ontofile: os.Path, datafiles: List[os.Path]): Ontology = { + val ontology = manager.loadOntologyFromOntologyDocument(ontofile.toIO) Ontology(ontology, datafiles) } 
@@ -214,7 +214,10 @@ object Ontology { * @param axioms list of axioms (roughly) corresponding to the TBox. * @param datafiles files containing ABox data. */ -class Ontology(val axioms: List[OWLLogicalAxiom], val datafiles: List[File]) { +class Ontology( + val axioms: List[OWLLogicalAxiom], + val datafiles: List[os.Path] +) { /** Extend OWLAxiom functionalities */ import uk.ac.ox.cs.rsacomb.implicits.RSAAxiom._ diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala index 217fa7f..aa501cd 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala @@ -186,10 +186,15 @@ object RDFoxUtil { * @param graph named graph where the data should be uploaded * @param files sequence of files to upload. */ - def addData(data: DataStoreConnection, graph: IRI, files: File*): Unit = + def addData(data: DataStoreConnection, graph: IRI, files: os.Path*): Unit = Logger.timed( - files.foreach { - data.importData(graph.getIRI, UpdateType.ADDITION, RSA.Prefixes, _) + files.foreach { path => + data.importData( + graph.getIRI, + UpdateType.ADDITION, + RSA.Prefixes, + path.toIO + ) }, "Loading data files", Logger.DEBUG @@ -235,11 +240,11 @@ object RDFoxUtil { * @return a list of [[tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery SelectQuery]] queries. 
 */ def loadQueriesFromFile( - file: File, + path: os.Path, prefixes: Prefixes = new Prefixes() ): List[ConjunctiveQuery] = { - val source = io.Source.fromFile(file) - val queries = source.getLines + val queries = os.read + .lines(path) .map(_.trim.filter(_ >= ' ')) .filterNot(_ == "") .foldRight((List.empty[List[String]], List.empty[String])) { @@ -254,8 +259,7 @@ object RDFoxUtil { .map(_.mkString(" ")) .map(ConjunctiveQuery.parse(_, prefixes)) .collect { case Some(q) => q } - Logger print s"Loaded ${queries.length} queries from ${file.getAbsolutePath}" - source.close() + Logger print s"Loaded ${queries.length} queries from $path" queries } -- cgit v1.2.3 From 0cd63c26fdfafaf09734950add28f63d500ac330 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Sun, 3 Oct 2021 11:16:33 +0100 Subject: Assign queries integer identifier The ID needs to be specified at creation time or in a query file preceding the query with the syntax ``` ^[Query] ``` where `` is the id of the query. --- .../uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala | 9 ++++----- src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala | 17 ++++++++++------- 2 files changed, 14 insertions(+), 12 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala index 105f425..693a9af 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/sparql/ConjunctiveQuery.scala @@ -32,14 +32,12 @@ import uk.ac.ox.cs.rsacomb.util.RDFoxUtil /** Factory for [[uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery]]. */ object ConjunctiveQuery { - private var idCounter: Int = 0; - /** Creates a new ConjunctiveQuery instance. 
* * @param query `SelectQuery` instance representing the actual query */ - def apply(query: SelectQuery): ConjunctiveQuery = - new ConjunctiveQuery({ idCounter += 1; idCounter }, query) + def apply(id: Int, query: SelectQuery): ConjunctiveQuery = + new ConjunctiveQuery(id, query) /** Creates a new ConjunctiveQuery from a query string * @@ -50,10 +48,11 @@ object ConjunctiveQuery { * input query represents one, None is returned otherwise. */ def parse( + id: Int, query: String, prefixes: Prefixes = new Prefixes() ): Option[ConjunctiveQuery] = - RDFoxUtil.parseSelectQuery(query, prefixes).map(ConjunctiveQuery(_)) + RDFoxUtil.parseSelectQuery(query, prefixes).map(ConjunctiveQuery(id, _)) } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala index aa501cd..46f1160 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala @@ -243,21 +243,24 @@ object RDFoxUtil { path: os.Path, prefixes: Prefixes = new Prefixes() ): List[ConjunctiveQuery] = { + val pattern = raw"\^\[Query(\d+)\]".r val queries = os.read .lines(path) .map(_.trim.filter(_ >= ' ')) .filterNot(_ == "") - .foldRight((List.empty[List[String]], List.empty[String])) { + .foldRight((List.empty[Option[ConjunctiveQuery]], List.empty[String])) { case (line, (acc, query)) => { - if ("^#\\^\\[Query\\d+\\]$".r.matches(line)) - (query :: acc, List.empty) - else - (acc, line :: query) + line match { + case pattern(id) => { + val cq = + ConjunctiveQuery.parse(id.toInt, query.mkString(" "), prefixes) + (cq :: acc, List.empty) + } + case _ => (acc, line :: query) + } } } ._1 - .map(_.mkString(" ")) - .map(ConjunctiveQuery.parse(_, prefixes)) .collect { case Some(q) => q } Logger print s"Loaded ${queries.length} queries from $path" queries -- cgit v1.2.3 From 7a73dcd98f3a7824572d098889634662a47d6e7c Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Sun, 3 Oct 2021 13:48:29 +0100 
Subject: Introduce functional tests These tests will check correctness of the overall system across versions. We should add more tests for known ontologies. --- src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala index 46f1160..e3e7dd4 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala @@ -243,7 +243,8 @@ object RDFoxUtil { path: os.Path, prefixes: Prefixes = new Prefixes() ): List[ConjunctiveQuery] = { - val pattern = raw"\^\[Query(\d+)\]".r + val header = raw"\^\[[Qq]uery(\d+)\]".r + val comment = "^#.*".r val queries = os.read .lines(path) .map(_.trim.filter(_ >= ' ')) @@ -251,12 +252,13 @@ .foldRight((List.empty[Option[ConjunctiveQuery]], List.empty[String])) { case (line, (acc, query)) => { line match { - case pattern(id) => { + case header(id) => { val cq = ConjunctiveQuery.parse(id.toInt, query.mkString(" "), prefixes) (cq :: acc, List.empty) } - case _ => (acc, line :: query) + case comment() => (acc, query) + case _ => (acc, line :: query) } } } -- cgit v1.2.3 From b290ea110d7bbb3cee6af2fc25b7ce07308b32f4 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Sun, 3 Oct 2021 13:49:59 +0100 Subject: Fix prefix in dependency graph --- src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala index 9d80dd5..ece6d15 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala @@ -155,7 +155,7 @@ object Ontology { RDFoxUtil.addData(data, ttn, datafiles: _*) /* 
Build the graph */ - val query = "SELECT ?X ?Y WHERE { ?X rsa:E ?Y }" + val query = "SELECT ?X ?Y WHERE { ?X rsacomb:E ?Y }" val answers = RDFoxUtil.submitQuery(data, query, RSA.Prefixes).get var edges: Seq[DiEdge[Resource]] = answers.collect { case (_, Seq(n1, n2)) => n1 ~> n2 } -- cgit v1.2.3 From 830b8d9f8902a4a3fc536f40843352e9ff905020 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Sun, 3 Oct 2021 22:09:32 +0100 Subject: Rework CLI configs --- src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala | 134 ++---------------- src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala | 154 +++++++++++++++++++++ 2 files changed, 164 insertions(+), 124 deletions(-) create mode 100644 src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala index 24bda1f..121c65f 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/Main.scala @@ -19,7 +19,6 @@ package uk.ac.ox.cs.rsacomb import java.io.{File, PrintWriter} import java.nio.file.{Path, Paths, InvalidPathException} import java.util.HashMap -import scala.collection.mutable.Map import scala.collection.JavaConverters._ import tech.oxfordsemantic.jrdfox.client.UpdateType import tech.oxfordsemantic.jrdfox.logic.expression.{IRI, Term} @@ -32,129 +31,15 @@ import uk.ac.ox.cs.rsacomb.ontology.Ontology import uk.ac.ox.cs.rsacomb.converter.Normalizer import uk.ac.ox.cs.rsacomb.approximation.{Upperbound, Lowerbound} -case class RSAOption[+T](opt: T) { - def get[T]: T = opt.asInstanceOf[T] -} - -object RSAConfig { - type Config = Map[Symbol, RSAOption[Any]] - - private implicit def toRSAOption[T](opt: T) = RSAOption[T](opt) - - /** Help message */ - private val help: String = """ - rsacomb - combined approach for CQ answering for RSA ontologies. - - USAGE - rsacomb [OPTIONS] [ ...] - - -h | -? 
| --help - print this help message - - -l | --logger - specify the logger verbosity. Values are: quiet, normal (default), - debug, verbose. - - -o | --output - path to the output file for the answers to the query (in JSON - format) - - -q | --queries - path to a file containing a single SPARQL query. If no query - is provided, only the approximation to RSA will be performed. - - - file containing the ontology - - - one or more data files - - """ - - /** Default config values */ - private val default: Config = Map.empty - - /** Utility to exit the program with a custom message on stderr. - * - * The program will exit with error code 1 after printing the help - * message. - * - * @param msg message printed to stderr. - */ - private def exit(msg: String): Nothing = { - System.err.println(msg) - System.err.println() - System.err.println(help) - sys.exit(1) - } - - private def getPath(str: String): os.Path = - try { - os.Path(str, base = os.pwd) - } catch { - case e: IllegalArgumentException => - exit(s"'$str' is not a well formed path.") - } - - /** Parse arguments with default options - * - * @param args arguments list - * @return map of config options - */ - def parse(args: List[String]): Config = parse(args, default) - - /** Parse arguments - * - * @param args arguments list - * @param config default configuration - * @return map of config options - */ - def parse(args: List[String], config: Config): Config = { - args match { - case flag @ ("-h" | "-?" 
| "--help") :: _ => { - println(help) - sys.exit(0) - } - case flag @ ("-l" | "--logger") :: _level :: tail => { - val level = _level match { - case "quiet" => Logger.QUIET - case "debug" => Logger.DEBUG - case "verbose" => Logger.VERBOSE - case _ => Logger.NORMAL - } - parse(tail, config += ('logger -> level)) - } - case flag @ ("-o" | "--output") :: output :: tail => - parse(tail, config += ('output -> getPath(output))) - case flag @ ("-q" | "--queries") :: _query :: tail => { - val query = getPath(_query) - if (!os.isFile(query)) - exit(s"'${_query}' is not a valid filename.") - parse(tail, config += ('queries -> query)) - } - case _ontology :: _data => { - val ontology = getPath(_ontology) - val data = _data map getPath - (ontology :: data) foreach { (path) => - if (!os.isFile(path)) - exit(s"'$path' is not a valid filename.") - } - finalise(config += ('ontology -> ontology) += ('data -> data)) - } - case a => exit(s"Invalid sequence of arguments '${a.mkString(" ")}'.") - } - } - - /** Perform final checks on parsed options */ - private def finalise(config: Config): Config = config -} - /** Main entry point to the program */ object RSAComb extends App { /* Command-line options */ val config = RSAConfig.parse(args.toList) - Logger.level = config('logger).get[Logger.Level] + + /* Set logger level */ + if (config.contains('logger)) + Logger.level = config('logger).get[Logger.Level] /* Load original ontology and normalize it */ val ontology = Ontology( @@ -177,11 +62,12 @@ object RSAComb extends App { val answers = rsa ask queries /* Write answers to output file */ - os.write( - config('output).get[os.Path], - ujson.write(ujson.Arr(answers.map(_.toJSON)), indent = 4), - createFolders = true - ) + if (config.contains('answers)) + os.write( + config('answers).get[os.Path], + ujson.write(ujson.Arr(answers.map(_.toJSON)), indent = 4), + createFolders = true + ) // Logger.print(s"$answers", Logger.VERBOSE) // Logger print s"Number of answers: ${answers.length} 
(${answers.lengthWithMultiplicity})" diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala new file mode 100644 index 0000000..4d96850 --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAConfig.scala @@ -0,0 +1,154 @@ +/* + * Copyright 2020, 2021 KRR Oxford + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package uk.ac.ox.cs.rsacomb + +import scala.collection.mutable.Map +import util.Logger + +case class RSAOption[+T](opt: T) { + def get[T]: T = opt.asInstanceOf[T] +} + +object RSAConfig { + type Config = Map[Symbol, RSAOption[Any]] + + private implicit def toRSAOption[T](opt: T) = RSAOption[T](opt) + + /** Help message */ + private val help: String = """ + rsacomb - combined approach for CQ answering for RSA ontologies. + + USAGE + rsacomb [OPTIONS] [ ...] + + -h | -? | --help + print this help message + + -l | --logger + specify the logger verbosity. Values are: quiet, normal (default), + debug, verbose. + + -a | --answers + path to the output file for the answers to the query (in JSON + format) + + -q | --queries + path to a file containing a single SPARQL query. If no query + is provided, only the approximation to RSA will be performed. + + -o | --ontology + ontology file in OWL format. + + -d | --data + data files to be used alongside the ontology file. If a + directory is provided, all files in the directory (recursively) + will be considered. 
+ + """ + + /** Default config values */ + private val default: Config = Map.empty + + /** Utility to exit the program with a custom message on stderr. + * + * The program will exit with error code 1 after printing the help + * message. + * + * @param msg message printed to stderr. + */ + private def exit(msg: String): Nothing = { + System.err.println(msg) + System.err.println() + System.err.println(help) + sys.exit(1) + } + + private def getPath(str: String): os.Path = + try { + os.Path(str, base = os.pwd) + } catch { + case e: IllegalArgumentException => + exit(s"'$str' is not a well formed path.") + } + + /** Parse arguments with default options + * + * @param args arguments list + * @return map of config options + */ + def parse(args: List[String]): Config = parse(args, default) + + /** Parse arguments + * + * @param args arguments list + * @param config default configuration + * @return map of config options + */ + def parse(args: List[String], config: Config): Config = { + args match { + case Nil => finalise(config) + case flag @ ("-h" | "-?" 
| "--help") :: _ => { + println(help) + sys.exit(0) + } + case flag @ ("-l" | "--logger") :: _level :: tail => { + val level = _level match { + case "quiet" => Logger.QUIET + case "debug" => Logger.DEBUG + case "verbose" => Logger.VERBOSE + case _ => Logger.NORMAL + } + parse(tail, config += ('logger -> level)) + } + case flag @ ("-a" | "--answers") :: answers :: tail => + parse(tail, config += ('answers -> getPath(answers))) + case flag @ ("-q" | "--queries") :: _query :: tail => { + val query = getPath(_query) + if (!os.isFile(query)) + exit(s"'${_query}' is not a valid filename.") + parse(tail, config += ('queries -> query)) + } + case flag @ ("-o" | "--ontology") :: _ontology :: tail => { + val ontology = getPath(_ontology) + if (!os.isFile(ontology)) + exit(s"'${_ontology}' is not a valid filename.") + parse(tail, config += ('ontology -> ontology)) + } + case flag @ ("-d" | "--data") :: _data :: tail => { + val data = getPath(_data) + val files = + if (os.isFile(data)) + Seq(data) + else if (os.isDir(data)) + os.walk(data).filter(os.isFile) + else + exit(s"'${_data}' is not a valid path.") + parse(tail, config += ('data -> files)) + } + case a => exit(s"Invalid sequence of arguments '${a.mkString(" ")}'.") + } + } + + /** Perform final checks on parsed options */ + private def finalise(config: Config): Config = { + if (!config.contains('ontology)) + exit("The following flag is mandatory: '-o' or '--ontology'.") + if (!config.contains('data)) + config += ('data -> List.empty[os.Path]) + config + } +} -- cgit v1.2.3 From 55384aaf879027012d688bfa566e34928a5f57e1 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Sun, 3 Oct 2021 22:57:52 +0100 Subject: Add file output on verbose log level --- src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 4 ++++ src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala | 17 +++++++++++------ 2 files changed, 15 insertions(+), 6 deletions(-) (limited to 'src/main/scala') diff --git 
a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index e2b0e2f..83ebbcb 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -549,6 +549,8 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) RDFoxUtil.addData(data, RSAOntology.CanonGraph, datafiles: _*) /* Top / equality axiomatization */ RDFoxUtil.addRules(data, topAxioms ++ equalityAxioms) + Logger.write(topAxioms.mkString("\n"), "canonical_model.datalog") + Logger.write(equalityAxioms.mkString("\n"), "canonical_model.datalog") /* Generate `named` predicates */ // TODO: do I need both to generate all NAMED atoms? RDFoxUtil.addFacts( @@ -571,6 +573,7 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) /* Add canonical model */ Logger print s"Canonical model rules: ${this.canonicalModel.rules.length}" + Logger.write(canonicalModel.rules.mkString("\n"), "canonical_model.datalog") RDFoxUtil.addRules(data, this.canonicalModel.rules) Logger print s"Canonical model facts: ${this.canonicalModel.facts.length}" @@ -586,6 +589,7 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) /* Add filtering program */ Logger print s"Filtering program rules: ${filter.rules.length}" + Logger.write(filter.rules.mkString("\n"), "filter.datalog") RDFoxUtil.addRules(data, filter.rules) // TODO: We remove the rules, should we drop the tuple table as well? 
data.clearRulesAxiomsExplicateFacts() diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala index a09fcea..275079c 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala @@ -25,6 +25,10 @@ import java.io.PrintStream */ object Logger { + private val time = Calendar.getInstance() + + private lazy val dir = os.temp.dir(os.pwd, "rsacomb-", false) + /** Output stream for the logger. */ var output: PrintStream = System.out @@ -42,12 +46,13 @@ object Logger { /** Currend logger level */ var level: Level = DEBUG - def print(str: Any, lvl: Level = NORMAL): Unit = { - if (lvl <= level) { - val time = Calendar.getInstance.getTime - output println s"[$lvl][$time] $str" - } - } + def print(str: Any, lvl: Level = NORMAL): Unit = + if (lvl <= level) + output println s"[$lvl][${time.getTime}] $str" + + def write(content: os.Source, file: String, lvl: Level = VERBOSE): Unit = + if (lvl <= level) + os.write.append(dir / file, content) def timed[A](expr: => A, desc: String = "", lvl: Level = NORMAL): A = { val t0 = System.currentTimeMillis() -- cgit v1.2.3 From 0d311287610bcf14a1b4ff35008359dde8c00fc3 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Mon, 4 Oct 2021 07:34:41 +0100 Subject: Make file logging lazy --- src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 2 +- src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index 83ebbcb..afda25e 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -589,7 +589,7 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) /* Add filtering program */ Logger print s"Filtering program rules: 
${filter.rules.length}" - Logger.write(filter.rules.mkString("\n"), "filter.datalog") + Logger.write(filter.rules.mkString("\n"), s"filter${query.id}.datalog") RDFoxUtil.addRules(data, filter.rules) // TODO: We remove the rules, should we drop the tuple table as well? data.clearRulesAxiomsExplicateFacts() diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala index 275079c..a55b5a0 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala @@ -50,7 +50,7 @@ object Logger { if (lvl <= level) output println s"[$lvl][${time.getTime}] $str" - def write(content: os.Source, file: String, lvl: Level = VERBOSE): Unit = + def write(content: => os.Source, file: String, lvl: Level = VERBOSE): Unit = if (lvl <= level) os.write.append(dir / file, content) -- cgit v1.2.3 From c86e7d32420adcc05546efa45b21e0e31d0f6c90 Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Mon, 4 Oct 2021 18:01:50 +0100 Subject: Fix issue where not all roles were considered for top axiomatisation --- .../scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala | 2 +- .../scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala | 76 ++++++++++++---------- .../ox/cs/rsacomb/approximation/Lowerbound.scala | 7 +- .../ox/cs/rsacomb/approximation/Upperbound.scala | 2 + .../filtering/RevisedFilteringProgram.scala | 2 +- .../uk/ac/ox/cs/rsacomb/ontology/Ontology.scala | 8 ++- 6 files changed, 56 insertions(+), 41 deletions(-) (limited to 'src/main/scala') diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala index a39b9c0..bd3d3c3 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/CanonicalModel.scala @@ -68,7 +68,7 @@ class CanonicalModel(val ontology: RSAOntology, val graph: IRI) { */ val rolesAdditionalRules: List[Rule] = { val tt = TupleTableName.create(graph.getIRI) - 
ontology.roles + ontology.objroles .collect { case prop: OWLObjectProperty => prop } .flatMap((pred) => { val iri = pred.getIRI.getIRIString diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala index afda25e..1ff466b 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/RSAOntology.scala @@ -27,6 +27,7 @@ import org.semanticweb.owlapi.model.{OWLOntology, OWLAxiom, OWLLogicalAxiom} import org.semanticweb.owlapi.model.{ OWLClass, OWLClassExpression, + OWLDataProperty, OWLDataPropertyAssertionAxiom, OWLObjectProperty, OWLSubObjectPropertyOfAxiom, @@ -46,18 +47,20 @@ import tech.oxfordsemantic.jrdfox.client.{ } import tech.oxfordsemantic.jrdfox.Prefixes import tech.oxfordsemantic.jrdfox.logic.datalog.{ + BodyFormula, + FilterAtom, + Negation, Rule, TupleTableAtom, - TupleTableName, - Negation, - BodyFormula + TupleTableName } import tech.oxfordsemantic.jrdfox.logic.expression.{ - Term, - Variable, + FunctionCall, IRI, + Literal, Resource, - Literal + Term, + Variable } import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery @@ -122,9 +125,10 @@ object RSAOntology { ) def apply( + origin: OWLOntology, axioms: List[OWLLogicalAxiom], datafiles: List[os.Path] - ): RSAOntology = new RSAOntology(axioms, datafiles) + ): RSAOntology = new RSAOntology(origin, axioms, datafiles) // def apply( // ontofile: File, @@ -191,8 +195,11 @@ object RSAOntology { * @param ontology the input OWL2 ontology. 
* @param datafiles additinal data (treated as part of the ABox) */ -class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) - extends Ontology(axioms, datafiles) { +class RSAOntology( + origin: OWLOntology, + axioms: List[OWLLogicalAxiom], + datafiles: List[os.Path] +) extends Ontology(origin, axioms, datafiles) { /** Simplify conversion between OWLAPI and RDFox concepts */ import implicits.RDFox._ @@ -221,10 +228,9 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) /** Retrieve concepts/roles in the ontology */ val concepts: List[OWLClass] = ontology.getClassesInSignature().asScala.toList - val roles: List[OWLObjectPropertyExpression] = - axioms - .flatMap(_.objectPropertyExpressionsInSignature) - .distinct + val objroles: List[OWLObjectPropertyExpression] = + axioms.flatMap(_.objectPropertyExpressionsInSignature).distinct + val dataroles: List[OWLDataProperty] = origin.getDataPropertiesInSignature /** Unsafe roles of a given ontology. * @@ -358,14 +364,12 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) private val topAxioms: List[Rule] = { val varX = Variable.create("X") val varY = Variable.create("Y") + val varZ = Variable.create("Z") val graph = TupleTableName.create(RSAOntology.CanonGraph.getIRI) - concepts - .map(c => { - Rule.create( - TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, IRI.THING), - TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, c.getIRI) - ) - }) ++ roles.map(r => { + Rule.create( + TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, IRI.THING), + TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, varY) + ) :: objroles.map(r => { val name = r match { case x: OWLObjectProperty => x.getIRI.getIRIString case x: OWLObjectInverseOf => @@ -378,6 +382,15 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) ), List(TupleTableAtom.create(graph, varX, name, varY)) ) + }) ::: dataroles.map(r => { + val name = r.getIRI.getIRIString + Rule.create( + List( + 
TupleTableAtom.create(graph, varX, IRI.RDF_TYPE, IRI.THING), + TupleTableAtom.create(graph, varY, IRI.RDF_TYPE, IRI.THING) + ), + List(TupleTableAtom.create(graph, varX, name, varY)) + ) }) } @@ -542,22 +555,17 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) queries map _ask private lazy val _ask: ConjunctiveQuery => ConjunctiveQueryAnswers = { - /* Open connection with RDFox server */ val (server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore) /* Upload data from data file */ RDFoxUtil.addData(data, RSAOntology.CanonGraph, datafiles: _*) - /* Top / equality axiomatization */ + + /* Top/equality axiomatization */ RDFoxUtil.addRules(data, topAxioms ++ equalityAxioms) Logger.write(topAxioms.mkString("\n"), "canonical_model.datalog") Logger.write(equalityAxioms.mkString("\n"), "canonical_model.datalog") - /* Generate `named` predicates */ - // TODO: do I need both to generate all NAMED atoms? - RDFoxUtil.addFacts( - data, - RSAOntology.CanonGraph, - (individuals ++ literals) map RSA.Named(RSAOntology.CanonGraph) - ) + + /* Introduce `rsacomb:Named` concept */ data.evaluateUpdate( null, // the base IRI for the query (if null, a default is used) RSA.Prefixes, @@ -572,25 +580,24 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) ) /* Add canonical model */ + Logger print s"Canonical model facts: ${this.canonicalModel.facts.length}" + RDFoxUtil.addFacts(data, RSAOntology.CanonGraph, this.canonicalModel.facts) Logger print s"Canonical model rules: ${this.canonicalModel.rules.length}" Logger.write(canonicalModel.rules.mkString("\n"), "canonical_model.datalog") RDFoxUtil.addRules(data, this.canonicalModel.rules) - Logger print s"Canonical model facts: ${this.canonicalModel.facts.length}" - RDFoxUtil.addFacts(data, RSAOntology.CanonGraph, this.canonicalModel.facts) - - /* Close connection with RDFox server */ RDFoxUtil.closeConnection(server, data) (query => { - /* Open connection with RDFox server */ val 
(server, data) = RDFoxUtil.openConnection(RSAOntology.DataStore) + val filter = RSAOntology.filteringProgram(query) /* Add filtering program */ Logger print s"Filtering program rules: ${filter.rules.length}" Logger.write(filter.rules.mkString("\n"), s"filter${query.id}.datalog") RDFoxUtil.addRules(data, filter.rules) + // TODO: We remove the rules, should we drop the tuple table as well? data.clearRulesAxiomsExplicateFacts() @@ -600,7 +607,6 @@ class RSAOntology(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]) .map(new ConjunctiveQueryAnswers(query, query.variables, _)) .get - /* Close connection with RDFox server */ RDFoxUtil.closeConnection(server, data) answers diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala index 88732d5..e261bce 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Lowerbound.scala @@ -51,6 +51,7 @@ class Lowerbound(implicit fresh: DataFactory) def approximate(ontology: Ontology): RSAOntology = toRSA( new Ontology( + ontology.origin, ontology.axioms filter inALCHOIQ flatMap shift, ontology.datafiles ) @@ -218,7 +219,11 @@ class Lowerbound(implicit fresh: DataFactory) }.toList /* Remove axioms from approximated ontology */ - RSAOntology(ontology.axioms diff toDelete, ontology.datafiles) + RSAOntology( + ontology.origin, + ontology.axioms diff toDelete, + ontology.datafiles + ) } // val edges1 = Seq('A ~> 'B, 'B ~> 'C, 'C ~> 'D, 'D ~> 'H, 'H ~> diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala index 1ae7941..469d774 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/approximation/Upperbound.scala @@ -51,6 +51,7 @@ class Upperbound(implicit fresh: DataFactory) def approximate(ontology: Ontology): 
RSAOntology = toRSA( new Ontology( + ontology.origin, ontology.axioms flatMap toConjuncts, ontology.datafiles ) @@ -161,6 +162,7 @@ class Upperbound(implicit fresh: DataFactory) /* Substitute selected axioms with their "skolemized" version */ RSAOntology( + ontology.origin, ontology.axioms diff toSkolem concat skolemized, ontology.datafiles ) diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala index f059bcd..94524be 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/filtering/RevisedFilteringProgram.scala @@ -42,7 +42,7 @@ object RDFoxDSL { import scala.collection.JavaConverters._ implicit class MyVariable(private val str: StringContext) extends AnyVal { - def v(args: Any*): Variable = Variable.create(s"${str.s(args: _*)}i") + def v(args: Any*): Variable = Variable.create(s"${str.s(args: _*)}") } } diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala index ece6d15..0aceb01 100644 --- a/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala +++ b/src/main/scala/uk/ac/ox/cs/rsacomb/ontology/Ontology.scala @@ -167,8 +167,8 @@ object Ontology { (graph, nodemap) } - def apply(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]): Ontology = - new Ontology(axioms, datafiles) + // def apply(axioms: List[OWLLogicalAxiom], datafiles: List[os.Path]): Ontology = + // new Ontology(axioms, datafiles) def apply(ontology: OWLOntology, datafiles: List[os.Path]): Ontology = { @@ -199,7 +199,7 @@ object Ontology { .collect(Collectors.toList()) .collect { case a: OWLLogicalAxiom => a } - Ontology(abox ::: tbox ::: rbox, datafiles) + new Ontology(ontology, abox ::: tbox ::: rbox, datafiles) } def apply(ontofile: os.Path, datafiles: List[os.Path]): Ontology = { @@ -215,6 +215,7 @@ object Ontology { * @param 
datafiles files containing ABox data. */ class Ontology( + val origin: OWLOntology, val axioms: List[OWLLogicalAxiom], val datafiles: List[os.Path] ) { @@ -293,6 +294,7 @@ class Ontology( */ def normalize(normalizer: Normalizer): Ontology = new Ontology( + origin, axioms flatMap normalizer.normalize, datafiles ) -- cgit v1.2.3