diff options
Diffstat (limited to 'src/main/scala/uk/ac')
8 files changed, 1523 insertions, 0 deletions
diff --git a/src/main/scala/uk/ac/ox/cs/acqua/Main.scala b/src/main/scala/uk/ac/ox/cs/acqua/Main.scala new file mode 100644 index 0000000..b16f03a --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/acqua/Main.scala | |||
| @@ -0,0 +1,86 @@ | |||
| 1 | /* | ||
| 2 | * Copyright 2021,2022 KRR Oxford | ||
| 3 | * | ||
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | * you may not use this file except in compliance with the License. | ||
| 6 | * You may obtain a copy of the License at | ||
| 7 | * | ||
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | * | ||
| 10 | * Unless required by applicable law or agreed to in writing, software | ||
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | * See the License for the specific language governing permissions and | ||
| 14 | * limitations under the License. | ||
| 15 | */ | ||
| 16 | |||
| 17 | package uk.ac.ox.cs.acqua | ||
| 18 | |||
| 19 | import uk.ac.ox.cs.rsacomb.converter.Normalizer | ||
| 20 | import uk.ac.ox.cs.rsacomb.ontology.Ontology | ||
| 21 | import uk.ac.ox.cs.rsacomb.util.{RDFoxUtil,RSA} | ||
| 22 | |||
| 23 | import uk.ac.ox.cs.pagoda.owl.OWLHelper | ||
| 24 | import uk.ac.ox.cs.pagoda.reasoner.{ELHOQueryReasoner,MyQueryReasoner,QueryReasoner,RLQueryReasoner} | ||
| 25 | import uk.ac.ox.cs.pagoda.util.PagodaProperties; | ||
| 26 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 27 | |||
| 28 | import uk.ac.ox.cs.acqua.reasoner.{ | ||
| 29 | AcquaQueryReasoner, | ||
| 30 | RSACombQueryReasoner | ||
| 31 | } | ||
| 32 | import uk.ac.ox.cs.acqua.util.AcquaConfig | ||
| 33 | |||
/** Command-line entry point for ACQuA.
  *
  * Parses CLI arguments, loads and normalizes the input ontology, and
  * picks the cheapest sound-and-complete reasoner for the fragment the
  * ontology falls in (OWL2 RL, ELHO, RSA), falling back to the full
  * ACQuA reasoner otherwise. Any provided queries are then evaluated.
  */
object Acqua extends App {
  val config = AcquaConfig.parse(args.toList)
  AcquaConfig describe config

  /* Load the ontology (TBox + data files) and bring it into normal form. */
  val ontopath = config('ontology).get[os.Path]
  val datapath = config('data).get[List[os.Path]]
  val ontology = Ontology(ontopath, datapath).normalize(new Normalizer)

  val properties = new PagodaProperties()
  /* TODO: find a better way to integrate CLI options from different
   * tools. A good idea would be to have [[util.AcquaConfig]] handle the
   * CLI and have methods to convert it into other "config" classes to
   * use with tool-specific interfaces.
   */
  if (config contains 'pagodata)
    properties setDataPath config('pagodata).get[String]

  // NOTE(review): these flags are currently unused in this object; kept
  // for backwards compatibility with any code referencing them.
  val performMultiStages = true
  val considerEqualities = true

  /* Select the most specific reasoner applicable to the ontology. */
  val reasoner: QueryReasoner =
    if (OWLHelper.isInOWL2RL(ontology.origin)) new RLQueryReasoner()
    else if (OWLHelper.isInELHO(ontology.origin)) new ELHOQueryReasoner()
    else if (ontology.isRSA) new RSACombQueryReasoner(ontology)
    else new AcquaQueryReasoner(ontology)

  /* Preprocessing: load ontology/data and check satisfiability. */
  reasoner.setProperties(properties)
  reasoner.loadOntology(ontology.origin)
  reasoner.importData(properties.getDataPath())
  if (reasoner.preprocess()) {
    Utility logInfo "The ontology is consistent!"
  } else {
    Utility logInfo "The ontology is inconsistent!"
    reasoner.dispose()
    sys.exit(0)
  }

  /* Query answering. `foreach` (not `map`) because evaluation is run
   * purely for its side effects on the query records. */
  if (config contains 'queries) {
    val queryManager = reasoner.getQueryManager()
    config('queries).get[List[os.Path]].foreach { path =>
      val queries = queryManager collectQueryRecords path.toString
      reasoner evaluate queries
    }
  }

  /* Release reasoner resources (stores and engines) before exiting;
   * previously the reasoner was only disposed on inconsistency. */
  reasoner.dispose()
}
| 86 | |||
diff --git a/src/main/scala/uk/ac/ox/cs/acqua/approximation/Noop.scala b/src/main/scala/uk/ac/ox/cs/acqua/approximation/Noop.scala new file mode 100644 index 0000000..69489ac --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/acqua/approximation/Noop.scala | |||
| @@ -0,0 +1,34 @@ | |||
| 1 | /* | ||
| 2 | * Copyright 2021,2022 KRR Oxford | ||
| 3 | * | ||
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | * you may not use this file except in compliance with the License. | ||
| 6 | * You may obtain a copy of the License at | ||
| 7 | * | ||
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | * | ||
| 10 | * Unless required by applicable law or agreed to in writing, software | ||
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | * See the License for the specific language governing permissions and | ||
| 14 | * limitations under the License. | ||
| 15 | */ | ||
| 16 | |||
| 17 | package uk.ac.ox.cs.acqua.approximation | ||
| 18 | |||
| 19 | import uk.ac.ox.cs.rsacomb.ontology.{Ontology,RSAOntology} | ||
| 20 | import uk.ac.ox.cs.rsacomb.approximation.Approximation | ||
| 21 | |||
/** No-op approximation.
  *
  * Leaves the input ontology untouched; its only purpose is to lift an
  * [[uk.ac.ox.cs.rsacomb.ontology.Ontology]] that is already RSA into
  * an [[uk.ac.ox.cs.rsacomb.ontology.RSAOntology]].
  */
object Noop extends Approximation[RSAOntology] {

  /** Repackage the input ontology, unchanged, as an [[RSAOntology]]. */
  def approximate(ontology: Ontology): RSAOntology = {
    // No transformation is performed: the same origin, axioms and data
    // files are simply handed to the RSAOntology constructor.
    RSAOntology(ontology.origin, ontology.axioms, ontology.datafiles)
  }

}
| 34 | |||
diff --git a/src/main/scala/uk/ac/ox/cs/acqua/implicits/PagodaConverters.scala b/src/main/scala/uk/ac/ox/cs/acqua/implicits/PagodaConverters.scala new file mode 100644 index 0000000..7d8d3ec --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/acqua/implicits/PagodaConverters.scala | |||
| @@ -0,0 +1,42 @@ | |||
| 1 | /* | ||
| 2 | * Copyright 2021,2022 KRR Oxford | ||
| 3 | * | ||
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | * you may not use this file except in compliance with the License. | ||
| 6 | * You may obtain a copy of the License at | ||
| 7 | * | ||
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | * | ||
| 10 | * Unless required by applicable law or agreed to in writing, software | ||
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | * See the License for the specific language governing permissions and | ||
| 14 | * limitations under the License. | ||
| 15 | */ | ||
| 16 | |||
| 17 | package uk.ac.ox.cs.acqua.implicits | ||
| 18 | |||
| 19 | import java.util.Collection | ||
| 20 | import scala.collection.JavaConverters._ | ||
| 21 | |||
| 22 | import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery | ||
| 23 | import uk.ac.ox.cs.rsacomb.util.RSA | ||
| 24 | import uk.ac.ox.cs.pagoda.query.QueryRecord | ||
| 25 | |||
/** Implicit conversions from PAGOdA query types to RSAComb query types. */
object PagodaConverters {

  /** Convert a PAGOdA [[QueryRecord]] into an RSAComb [[ConjunctiveQuery]].
    *
    * Query ids that do not parse as integers are mapped to -1.
    *
    * @note `ConjunctiveQuery.parse(...).get` throws when the query text
    * cannot be parsed; callers are expected to provide well-formed queries.
    */
  implicit def queryRecord2conjuctiveQuery(q: QueryRecord): ConjunctiveQuery = {
    val id = q.getQueryID.toIntOption.getOrElse(-1)
    ConjunctiveQuery.parse(id, q.getQueryText(), RSA.Prefixes).get
  }

  /** Convert a Java collection of [[QueryRecord]]s into a list of
    * RSAComb [[ConjunctiveQuery]]s, element-wise.
    */
  implicit def queryRecords2conjuctiveQueries(qs: Collection[QueryRecord]): List[ConjunctiveQuery] =
    qs.asScala.toList.map(queryRecord2conjuctiveQuery)

}
| 40 | |||
| 41 | |||
| 42 | |||
diff --git a/src/main/scala/uk/ac/ox/cs/acqua/implicits/RSACombAnswerTuples.scala b/src/main/scala/uk/ac/ox/cs/acqua/implicits/RSACombAnswerTuples.scala new file mode 100644 index 0000000..d0bba72 --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/acqua/implicits/RSACombAnswerTuples.scala | |||
| @@ -0,0 +1,147 @@ | |||
| 1 | /* | ||
| 2 | * Copyright 2021,2022 KRR Oxford | ||
| 3 | * | ||
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | * you may not use this file except in compliance with the License. | ||
| 6 | * You may obtain a copy of the License at | ||
| 7 | * | ||
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | * | ||
| 10 | * Unless required by applicable law or agreed to in writing, software | ||
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | * See the License for the specific language governing permissions and | ||
| 14 | * limitations under the License. | ||
| 15 | */ | ||
| 16 | |||
| 17 | package uk.ac.ox.cs.acqua.implicits | ||
| 18 | |||
| 19 | import uk.ac.ox.cs.JRDFox.model.{ | ||
| 20 | BlankNode => OldBlankNode, | ||
| 21 | Datatype => OldDatatype, | ||
| 22 | Individual => OldIndividual, | ||
| 23 | Literal => OldLiteral, | ||
| 24 | } | ||
| 25 | import tech.oxfordsemantic.jrdfox.logic.Datatype | ||
| 26 | import tech.oxfordsemantic.jrdfox.logic.expression.{ | ||
| 27 | BlankNode, | ||
| 28 | IRI, | ||
| 29 | Literal, | ||
| 30 | Resource | ||
| 31 | } | ||
| 32 | import uk.ac.ox.cs.pagoda.query.{AnswerTuple,AnswerTuples} | ||
| 33 | import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQueryAnswers | ||
| 34 | |||
/** Implicit wrapper around [[uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQueryAnswers]]
  *
  * It implicitly converts a [[uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQueryAnswers]]
  * into a [[uk.ac.ox.cs.pagoda.query.AnswerTuples]] to maintain
  * compatibility between RSAComb and PAGOdA.
  */
object RSACombAnswerTuples {

  implicit class RSACombAnswerTuples(
    val answers: ConjunctiveQueryAnswers
  ) extends AnswerTuples {

    /* Iterator simulated using an index over an [[IndexedSeq]].
     *
     * `iter` is computed once and never reassigned, hence a `val` (was
     * a `var`); only the cursor `idx` is mutable. Indexed access keeps
     * [[getTuple]] efficient and lets [[reset]] rewind cheaply.
     */
    private val iter = answers.answers.map(_._2).toIndexedSeq
    private var idx: Int = 0

    /** Reset the iterator over the answers. */
    def reset(): Unit = idx = 0

    /** True if the iterator can provide more items. */
    def isValid: Boolean = idx < iter.length

    /** Get arity of answer variables. */
    def getArity: Int = answers.query.answer.length

    /** Get array of answer variable names. */
    def getAnswerVariables: Array[String] =
      answers.query.answer.map(_.getName).toArray

    /** Advance iterator state (does not bounds-check; see [[isValid]]). */
    def moveNext(): Unit = idx += 1

    /** Get the [[uk.ac.ox.cs.pagoda.query.AnswerTuple]] at the current position. */
    def getTuple: AnswerTuple = iter(idx)

    /** Return true if the input tuple is part of this collection.
      *
      * @param tuple the answer to be checked.
      *
      * @note this operation is currently not supported and always throws
      * [[scala.NotImplementedError]].
      */
    def contains(tuple: AnswerTuple): Boolean = ???

    /** Skip one item in the iterator.
      *
      * @note the semantics of this method is not clear to the author
      * and the description is just an assumption.
      */
    def remove(): Unit = moveNext()
  }

  /** Implicit conversion from RSAComb-style answers to [[uk.ac.ox.cs.pagoda.query.AnswerTuple]].
    *
    * @note the match covers IRIs, blank nodes and literals only; any
    * other [[Resource]] subtype would raise a [[MatchError]] — assumed
    * not to occur in RSAComb answers (TODO confirm).
    */
  private implicit def asAnswerTuple(
    answer: Seq[Resource]
  ): AnswerTuple = new AnswerTuple(answer.map {
    case r: IRI       => OldIndividual.create(r.getIRI)
    case r: BlankNode => OldBlankNode.create(r.getID)
    case r: Literal   => OldLiteral.create(r.getLexicalForm, r.getDatatype)
  }.toArray)

  /** Implicit conversion from [[tech.oxfordsemantic.jrdfox.logic.Datatype]] to [[uk.ac.ox.cs.JRDFox.model.Datatype]]
    *
    * @note this might not be 100% accurate since the two interfaces are
    * slightly different: day/time and year/month durations are widened
    * to plain XSD_DURATION, and invalid datatypes fall back to
    * XSD_ANY_URI.
    */
  private implicit def asOldDatatype(
    datatype: Datatype
  ): OldDatatype = datatype match {
    case Datatype.BLANK_NODE => OldDatatype.BLANK_NODE
    case Datatype.IRI_REFERENCE => OldDatatype.IRI_REFERENCE
    case Datatype.RDF_PLAIN_LITERAL => OldDatatype.RDF_PLAIN_LITERAL
    case Datatype.RDFS_LITERAL => OldDatatype.RDFS_LITERAL
    case Datatype.XSD_ANY_URI => OldDatatype.XSD_ANY_URI
    case Datatype.XSD_BOOLEAN => OldDatatype.XSD_BOOLEAN
    case Datatype.XSD_BYTE => OldDatatype.XSD_BYTE
    case Datatype.XSD_DATE => OldDatatype.XSD_DATE
    case Datatype.XSD_DATE_TIME => OldDatatype.XSD_DATE_TIME
    case Datatype.XSD_DATE_TIME_STAMP => OldDatatype.XSD_DATE_TIME_STAMP
    case Datatype.XSD_DECIMAL => OldDatatype.XSD_DECIMAL
    case Datatype.XSD_DOUBLE => OldDatatype.XSD_DOUBLE
    case Datatype.XSD_DURATION => OldDatatype.XSD_DURATION
    case Datatype.XSD_FLOAT => OldDatatype.XSD_FLOAT
    case Datatype.XSD_G_DAY => OldDatatype.XSD_G_DAY
    case Datatype.XSD_G_MONTH => OldDatatype.XSD_G_MONTH
    case Datatype.XSD_G_MONTH_DAY => OldDatatype.XSD_G_MONTH_DAY
    case Datatype.XSD_G_YEAR => OldDatatype.XSD_G_YEAR
    case Datatype.XSD_G_YEAR_MONTH => OldDatatype.XSD_G_YEAR_MONTH
    case Datatype.XSD_INT => OldDatatype.XSD_INT
    case Datatype.XSD_INTEGER => OldDatatype.XSD_INTEGER
    case Datatype.XSD_LONG => OldDatatype.XSD_LONG
    case Datatype.XSD_NEGATIVE_INTEGER => OldDatatype.XSD_NEGATIVE_INTEGER
    case Datatype.XSD_NON_NEGATIVE_INTEGER => OldDatatype.XSD_NON_NEGATIVE_INTEGER
    case Datatype.XSD_NON_POSITIVE_INTEGER => OldDatatype.XSD_NON_POSITIVE_INTEGER
    case Datatype.XSD_POSITIVE_INTEGER => OldDatatype.XSD_POSITIVE_INTEGER
    case Datatype.XSD_SHORT => OldDatatype.XSD_SHORT
    case Datatype.XSD_STRING => OldDatatype.XSD_STRING
    case Datatype.XSD_TIME => OldDatatype.XSD_TIME
    case Datatype.XSD_UNSIGNED_BYTE => OldDatatype.XSD_UNSIGNED_BYTE
    case Datatype.XSD_UNSIGNED_INT => OldDatatype.XSD_UNSIGNED_INT
    case Datatype.XSD_UNSIGNED_LONG => OldDatatype.XSD_UNSIGNED_LONG
    case Datatype.XSD_UNSIGNED_SHORT => OldDatatype.XSD_UNSIGNED_SHORT
    case Datatype.XSD_DAY_TIME_DURATION => OldDatatype.XSD_DURATION
    case Datatype.XSD_YEAR_MONTH_DURATION => OldDatatype.XSD_DURATION
    case Datatype.INVALID_DATATYPE => OldDatatype.XSD_ANY_URI
  }
}
diff --git a/src/main/scala/uk/ac/ox/cs/acqua/reasoner/AcquaQueryReasoner.scala b/src/main/scala/uk/ac/ox/cs/acqua/reasoner/AcquaQueryReasoner.scala new file mode 100644 index 0000000..8358359 --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/acqua/reasoner/AcquaQueryReasoner.scala | |||
| @@ -0,0 +1,541 @@ | |||
| 1 | /* | ||
| 2 | * Copyright 2021,2022 KRR Oxford | ||
| 3 | * | ||
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | * you may not use this file except in compliance with the License. | ||
| 6 | * You may obtain a copy of the License at | ||
| 7 | * | ||
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | * | ||
| 10 | * Unless required by applicable law or agreed to in writing, software | ||
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | * See the License for the specific language governing permissions and | ||
| 14 | * limitations under the License. | ||
| 15 | */ | ||
| 16 | |||
| 17 | package uk.ac.ox.cs.acqua.reasoner | ||
| 18 | |||
| 19 | import java.util.LinkedList; | ||
| 20 | |||
| 21 | import scala.collection.JavaConverters._ | ||
| 22 | import org.semanticweb.karma2.profile.ELHOProfile | ||
| 23 | import org.semanticweb.owlapi.model.OWLOntology | ||
| 24 | import org.semanticweb.owlapi.model.parameters.Imports | ||
| 25 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | ||
| 26 | import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine | ||
| 27 | import uk.ac.ox.cs.pagoda.owl.OWLHelper | ||
| 28 | import uk.ac.ox.cs.pagoda.query.{ | ||
| 29 | AnswerTuples, | ||
| 30 | GapByStore4ID, | ||
| 31 | GapByStore4ID2, | ||
| 32 | QueryRecord, | ||
| 33 | } | ||
| 34 | import uk.ac.ox.cs.pagoda.query.QueryRecord.Step | ||
| 35 | import uk.ac.ox.cs.pagoda.reasoner.{ | ||
| 36 | MyQueryReasoner, | ||
| 37 | QueryReasoner | ||
| 38 | } | ||
| 39 | import uk.ac.ox.cs.pagoda.reasoner.light.{KarmaQueryEngine,BasicQueryEngine} | ||
| 40 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram | ||
| 41 | import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter; | ||
| 42 | import uk.ac.ox.cs.pagoda.tracking.{ | ||
| 43 | QueryTracker, | ||
| 44 | TrackingRuleEncoder, | ||
| 45 | TrackingRuleEncoderDisjVar1, | ||
| 46 | TrackingRuleEncoderWithGap, | ||
| 47 | } | ||
| 48 | import uk.ac.ox.cs.pagoda.util.{ | ||
| 49 | ExponentialInterpolation, | ||
| 50 | PagodaProperties, | ||
| 51 | Timer, | ||
| 52 | Utility | ||
| 53 | } | ||
| 54 | import uk.ac.ox.cs.pagoda.util.tuples.Tuple; | ||
| 55 | import uk.ac.ox.cs.rsacomb.ontology.Ontology | ||
| 56 | import uk.ac.ox.cs.rsacomb.approximation.{Lowerbound,Upperbound} | ||
| 57 | |||
| 58 | class AcquaQueryReasoner(val ontology: Ontology) | ||
| 59 | extends QueryReasoner { | ||
| 60 | |||
| 61 | /** Compatibility convertions between PAGOdA and RSAComb */ | ||
| 62 | import uk.ac.ox.cs.acqua.implicits.PagodaConverters._ | ||
| 63 | |||
| 64 | var encoder: Option[TrackingRuleEncoder] = None | ||
| 65 | private var lazyUpperStore: Option[MultiStageQueryEngine] = None; | ||
| 66 | |||
| 67 | private val timer: Timer = new Timer(); | ||
| 68 | |||
| 69 | private var _isConsistent: ConsistencyStatus = StatusUnchecked | ||
| 70 | // TODO: explicit casting to MyQueryReasoner makes no sense. Find | ||
| 71 | // another solution. Probably requires changing PAGOdA source code. | ||
| 72 | private val consistencyManager: ConsistencyManager = new ConsistencyManager(this) | ||
| 73 | |||
| 74 | val rlLowerStore: BasicQueryEngine = new BasicQueryEngine("rl-lower-bound") | ||
| 75 | val elLowerStore: KarmaQueryEngine = new KarmaQueryEngine("elho-lower-bound") | ||
| 76 | private lazy val lowerRSAEngine = new RSACombQueryReasoner(ontology, new Lowerbound) | ||
| 77 | private lazy val upperRSAEngine = new RSACombQueryReasoner(ontology, new Upperbound) | ||
| 78 | |||
| 79 | val trackingStore = new MultiStageQueryEngine("tracking", false); | ||
| 80 | |||
| 81 | var predicatesWithGap: Seq[String] = Seq.empty | ||
| 82 | |||
| 83 | /* Load ontology into PAGOdA */ | ||
| 84 | val datalog = new DatalogProgram(ontology.origin); | ||
| 85 | //datalog.getGeneral().save(); | ||
| 86 | if (!datalog.getGeneral().isHorn()) | ||
| 87 | lazyUpperStore = Some(new MultiStageQueryEngine("lazy-upper-bound", true)) | ||
| 88 | importData(datalog.getAdditionalDataFile()) | ||
| 89 | private val elhoOntology: OWLOntology = new ELHOProfile().getFragment(ontology.origin); | ||
| 90 | elLowerStore processOntology elhoOntology | ||
| 91 | |||
| 92 | |||
| 93 | /** Performs nothing. | ||
| 94 | * | ||
| 95 | * Loading of the ontology is performed at instance creation to avoid | ||
| 96 | * unnecessary complexity (see main class constructor). | ||
| 97 | * | ||
| 98 | * @note Implemented for compatibility with other reasoners. | ||
| 99 | */ | ||
| 100 | def loadOntology(ontology: OWLOntology): Unit = { } | ||
| 101 | |||
  /** Preprocessing of input ontology.
    *
    * This is mostly PAGOdA related: the data is imported into the RL
    * and ELHO lower-bound stores and the (lazy/tracking) upper-bound
    * stores, each of which is then materialised. Note that, while most
    * of the computation in RSAComb is performed "on-demand", we are
    * forcing the approximation from below of the input ontology to RSA,
    * and the computation of its canonical model, to make timing
    * measurements more consistent.
    *
    * @return whether the input ontology is found consistent after the
    * preprocessing phase.
    */
  def preprocess(): Boolean = {
    timer.reset();
    Utility logInfo "Preprocessing (and checking satisfiability)..."

    val name = "data"
    val datafile = getImportedData()

    /* RL lower-bound check: import data, materialise the lower datalog
     * program, and bail out early if already inconsistent. */
    rlLowerStore.importRDFData(name, datafile);
    rlLowerStore.materialise("lower program", datalog.getLower.toString);
    if (!consistencyManager.checkRLLowerBound) {
      Utility logDebug s"time for satisfiability checking: ${timer.duration()}"
      _isConsistent = StatusInconsistent
      return false
    }
    Utility logDebug s"The number of 'sameAs' assertions in RL lower store: ${rlLowerStore.getSameAsNumber}"

    /* EHLO lower bound check: named individuals are saturated first,
     * then the lower program is materialised and Karma initialised. */
    val originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology.origin)
    elLowerStore.importRDFData(name, datafile);
    elLowerStore.materialise("saturate named individuals", originalMarkProgram);
    elLowerStore.materialise("lower program", datalog.getLower.toString);
    elLowerStore.initialiseKarma();
    if (!consistencyManager.checkELLowerBound) {
      Utility logDebug s"time for satisfiability checking: ${timer.duration()}"
      _isConsistent = StatusInconsistent
      return false
    }

    /* Lazy upper store (only present for non-Horn programs). A tag of
     * -1 from the restricted materialisation signals inconsistency;
     * the default 1 means there is no lazy store to process. */
    val tag = lazyUpperStore.map(store => {
      store.importRDFData(name, datafile)
      store.materialise("saturate named individuals", originalMarkProgram)
      store.materialiseRestrictedly(datalog, null)
    }).getOrElse(1)
    if (tag == -1) {
      Utility logDebug s"time for satisfiability checking: ${timer.duration()}"
      _isConsistent = StatusInconsistent
      return false
    }
    /* NOTE(review): the result of this `flatMap` is discarded, so
     * `lazyUpperStore` remains defined even though its store has been
     * disposed. PAGOdA's MyQueryReasoner disposes only when tag != 1
     * and reassigns the field — confirm whether this should read
     * `lazyUpperStore = lazyUpperStore.flatMap(...)` guarded on tag. */
    lazyUpperStore.flatMap(store => { store.dispose(); None })

    /* Tracking store: folded materialisation recording the predicates
     * with a gap between lower and upper bound. */
    trackingStore.importRDFData(name, datafile)
    trackingStore.materialise("saturate named individuals", originalMarkProgram)
    val gap: GapByStore4ID = new GapByStore4ID2(trackingStore, rlLowerStore);
    trackingStore.materialiseFoldedly(datalog, gap);
    this.predicatesWithGap = gap.getPredicatesWithGap.asScala.toSeq;
    gap.clear();

    /* Pick the tracking-rule encoder matching the program shape. */
    if (datalog.getGeneral.isHorn)
      encoder = Some(new TrackingRuleEncoderWithGap(datalog.getUpper, trackingStore))
    else
      encoder = Some(new TrackingRuleEncoderDisjVar1(datalog.getUpper, trackingStore))

    /* Perform consistency checking if not already inconsistent */
    if (!isConsistent()) return false
    consistencyManager.extractBottomFragment();

    /* Force computation of lower RSA approximations and its canonical
     * model. We wait to process the upperbound since it might not be
     * necessary after all. */
    lowerRSAEngine.preprocess()
    //upperRSAEngine.preprocess()

    true
  }
| 179 | |||
| 180 | /** Returns a the consistency status of the ontology. | ||
| 181 | * | ||
| 182 | * Performs a consistency check if the current status is undefined. | ||
| 183 | * Some logging is performed as well. | ||
| 184 | * | ||
| 185 | * @returns true if the ontology is consistent, false otherwise. | ||
| 186 | */ | ||
| 187 | def isConsistent(): Boolean = { | ||
| 188 | if (_isConsistent == StatusUnchecked) { | ||
| 189 | _isConsistent = consistencyManager.check | ||
| 190 | Utility logDebug s"time for satisfiability checking: ${timer.duration()}" | ||
| 191 | } | ||
| 192 | Utility logInfo s"The ontology is ${_isConsistent}!" | ||
| 193 | return _isConsistent.asBoolean | ||
| 194 | } | ||
| 195 | |||
  /** Evaluate a query against this reasoner.
    *
    * This is the main entry to compute the answers to a query.
    * By the end of the computation, the query record passed as input
    * will contain the answers found during the answering process.
    * This behaves conservatively and will try very hard not to perform
    * unnecessary computation: cheaper bounds are tried first, and
    * summarisation / full reasoning only run when the bounds fail to
    * coincide.
    *
    * @param query the query record to evaluate.
    */
  def evaluate(query: QueryRecord): Unit = {
    /* NOTE(review): the boolean result of queryLowerAndUpperBounds is
     * discarded, so the RSA lower bound below is computed even when the
     * base bounds already coincide; queryRSALowerBound returning
     * query.isProcessed keeps the overall result correct. Confirm this
     * is intentional. */
    queryLowerAndUpperBounds(query)
    /* Short-circuits: the RSA upper bound is only computed when the
     * RSA lower bound did not already close the gap. */
    val processed =
      queryRSALowerBound(query) ||
      queryRSAUpperBound(query)
    if (!processed) {
      /* Restrict reasoning to the fragment of the ontology relevant to
       * this query before trying the expensive steps. */
      val relevantOntologySubset: OWLOntology =
        extractRelevantOntologySubset(query)

      /* Optionally try the skolemised subset before summarisation. */
      if (properties.getSkolemUpperBound == PagodaProperties.SkolemUpperBoundOptions.BEFORE_SUMMARISATION &&
        querySkolemisedRelevantSubset(relevantOntologySubset, query)
      ) return;

      Utility logInfo ">> Summarisation <<"
      val summarisedChecker: HermitSummaryFilter =
        new HermitSummaryFilter(query, properties.getToCallHermiT)
      /* A zero count means no gap answers survived summarisation. */
      if(summarisedChecker.check(query.getGapAnswers) == 0) {
        summarisedChecker.dispose()
        return;
      }

      /* Optionally try the skolemised subset after summarisation. */
      if (properties.getSkolemUpperBound == PagodaProperties.SkolemUpperBoundOptions.AFTER_SUMMARISATION &&
        querySkolemisedRelevantSubset(relevantOntologySubset, query)
      ) {
        summarisedChecker.dispose()
        return;
      }

      /* Last resort: full reasoning over the remaining gap answers. */
      Utility logInfo ">> Full reasoning <<"
      timer.reset()
      summarisedChecker checkByFullReasoner query.getGapAnswers
      Utility logDebug s"Total time for full reasoner: ${timer.duration()}"

      if (properties.getToCallHermiT) query.markAsProcessed()

      summarisedChecker.dispose()
    }
  }
| 244 | |||
  /** Only compute the upperbound for a query.
    *
    * @param record the query record to evaluate.
    *
    * @note this is not supported at the moment and always throws
    * [[scala.NotImplementedError]]. Look at
    * [[uk.ac.ox.cs.pagoda.reasoner.MyQueryReasoner]] for an example
    * implementation.
    */
  def evaluateUpper(record: QueryRecord): Unit = ???
| 252 | |||
| 253 | /** Clean up the query reasoner */ | ||
| 254 | override def dispose(): Unit = { | ||
| 255 | super.dispose() | ||
| 256 | if(encoder.isDefined) encoder.get.dispose() | ||
| 257 | if(rlLowerStore != null) rlLowerStore.dispose(); | ||
| 258 | if(lazyUpperStore.isDefined) lazyUpperStore.get.dispose(); | ||
| 259 | if(elLowerStore != null) elLowerStore.dispose(); | ||
| 260 | if(trackingStore != null) trackingStore.dispose(); | ||
| 261 | if(consistencyManager != null) consistencyManager.dispose(); | ||
| 262 | if(datalog != null) datalog.dispose(); | ||
| 263 | } | ||
| 264 | |||
| 265 | /** Perform CQ anwering for a specific upper bound engine. | ||
| 266 | * | ||
| 267 | * @param store upper bound engine to be used in the computation. | ||
| 268 | * @param query query record. | ||
| 269 | * @param queryText actual text of the query to be executed. | ||
| 270 | * @param answerVariables answer variables for the query. | ||
| 271 | */ | ||
| 272 | private def queryUpperBound( | ||
| 273 | store: BasicQueryEngine, | ||
| 274 | query: QueryRecord, | ||
| 275 | queryText: String, | ||
| 276 | answerVariables: Array[String] | ||
| 277 | ): Unit = { | ||
| 278 | var rlAnswer: AnswerTuples = null | ||
| 279 | try { | ||
| 280 | Utility logDebug queryText | ||
| 281 | rlAnswer = store.evaluate(queryText, answerVariables) | ||
| 282 | Utility logDebug timer.duration() | ||
| 283 | query updateUpperBoundAnswers rlAnswer | ||
| 284 | } finally { | ||
| 285 | if (rlAnswer != null) rlAnswer.dispose() | ||
| 286 | } | ||
| 287 | } | ||
| 288 | |||
  /** Perform CQ answering for a specific upper bound engine.
    *
    * Up to three query variants are tried in order, each one only if
    * the previous did not fully process the query: the original text,
    * the first extended text (if different), and the variant over the
    * distinguished variables (if the query has non-answer distinguished
    * variables).
    *
    * @param upperStore upper bound engine to be used in the computation.
    * @param query query record.
    * @param extendedQuery extended version of the query.
    * @param step difficulty of the current step.
    * @return whether the query has been fully answered, i.e., the
    * bounds computed so far coincide.
    *
    * @note It deals with blank nodes differently from variables
    * according to SPARQL semantics for OWL2 Entailment Regime. In
    * particular variables are matched only against named individuals,
    * and blank nodes against named and anonymous individuals.
    */
  private def queryUpperStore(
    upperStore: BasicQueryEngine,
    query: QueryRecord,
    extendedQuery: Tuple[String],
    step: Step
  ): Boolean = {
    timer.reset();

    Utility logDebug "First query type"
    queryUpperBound(upperStore, query, query.getQueryText, query.getAnswerVariables)
    /* Second variant only when the extended text actually differs. */
    if (!query.isProcessed() && !query.getQueryText().equals(extendedQuery.get(0))) {
      Utility logDebug "Second query type"
      queryUpperBound(upperStore, query, extendedQuery.get(0), query.getAnswerVariables)
    }
    /* Third variant targets the distinguished (non-answer) variables. */
    if (!query.isProcessed() && query.hasNonAnsDistinguishedVariables()) {
      Utility logDebug "Third query type"
      queryUpperBound(upperStore, query, extendedQuery.get(1), query.getDistinguishedVariables)
    }

    /* Record the time spent at this difficulty step. */
    query.addProcessingTime(step, timer.duration())
    if (query.isProcessed()) query.setDifficulty(step)
    query.isProcessed()
  }
| 326 | |||
  /** Computes the bounds to the answers for a query.
    *
    * Both the lower (RL + ELHO) and upper bounds are computed here, in
    * order: RL lower bound, upper bound(s) (tracking and/or lazy
    * store), then the ELHO lower bound. The method returns early as
    * soon as the bounds coincide.
    *
    * @param query the query to be executed
    * @return whether the query has been fully answered, i.e., the
    * bounds computed so far coincide.
    */
  private def queryLowerAndUpperBounds(query: QueryRecord): Boolean = {
    Utility logInfo ">> Base bounds <<"
    val extendedQueryTexts: Tuple[String] = query.getExtendedQueryText()
    var rlAnswer: AnswerTuples = null
    var elAnswer: AnswerTuples = null

    /* Compute RL lower bound answers; tuples are always disposed. */
    timer.reset();
    try {
      rlAnswer = rlLowerStore.evaluate(query.getQueryText, query.getAnswerVariables)
      Utility logDebug timer.duration()
      query updateLowerBoundAnswers rlAnswer
    } finally {
      if (rlAnswer != null) rlAnswer.dispose()
    }
    query.addProcessingTime(Step.LOWER_BOUND, timer.duration());

    /* Compute upper bound answers: the tracking store is used when the
     * simple upper bound is forced or no lazy store exists; the lazy
     * store only applies to non-bottom queries. Each step may fully
     * answer the query, in which case we return early. */
    if(properties.getUseAlwaysSimpleUpperBound() || lazyUpperStore.isEmpty) {
      Utility logDebug "Tracking store"
      if (queryUpperStore(trackingStore, query, extendedQueryTexts, Step.SIMPLE_UPPER_BOUND))
        return true;
    }
    if (!query.isBottom) {
      Utility logDebug "Lazy store"
      if (lazyUpperStore.isDefined && queryUpperStore(lazyUpperStore.get, query, extendedQueryTexts, Step.LAZY_UPPER_BOUND))
        return true
    }

    timer.reset()
    /* Compute ELHO lower bound answers, extending the lower bound
     * already obtained from the RL store. */
    try {
      elAnswer = elLowerStore.evaluate(
        extendedQueryTexts.get(0),
        query.getAnswerVariables,
        query.getLowerBoundAnswers
      )
      Utility logDebug timer.duration()
      query updateLowerBoundAnswers elAnswer
    } finally {
      if (elAnswer != null) elAnswer.dispose()
    }
    query.addProcessingTime(Step.EL_LOWER_BOUND, timer.duration())

    if (query.isProcessed()) query.setDifficulty(Step.EL_LOWER_BOUND)
    query.isProcessed()
  }
| 382 | |||
| 383 | /** Compute lower bound using RSAComb. | ||
| 384 | * | ||
| 385 | * @param query query record to update. | ||
| 386 | * @returns true if the query is fully answered. | ||
| 387 | */ | ||
| 388 | private def queryRSALowerBound(query: QueryRecord): Boolean = { | ||
| 389 | lowerRSAEngine evaluate query | ||
| 390 | query.isProcessed | ||
| 391 | } | ||
| 392 | |||
| 393 | /** Compute upper bound using RSAComb. | ||
| 394 | * | ||
| 395 | * @param query query record to update. | ||
| 396 | * @returns true if the query is fully answered. | ||
| 397 | */ | ||
| 398 | private def queryRSAUpperBound(query: QueryRecord): Boolean = { | ||
| 399 | upperRSAEngine evaluate query | ||
| 400 | query.isProcessed | ||
| 401 | } | ||
| 402 | |||
| 403 | /** Extract a subset of the ontology relevant to the query. | ||
| 404 | * | ||
| 405 | * @param query query record for which the subset ontology is computed. | ||
| 406 | * @returns an [[OWLOntology]] subset of the input ontology. | ||
| 407 | */ | ||
| 408 | private def extractRelevantOntologySubset(query: QueryRecord): OWLOntology = { | ||
| 409 | Utility logInfo ">> Relevant ontology-subset extraction <<" | ||
| 410 | |||
| 411 | timer.reset() | ||
| 412 | |||
| 413 | val tracker: QueryTracker = | ||
| 414 | new QueryTracker(encoder.get, rlLowerStore, query) | ||
| 415 | val relevantOntologySubset: OWLOntology = | ||
| 416 | tracker.extract( trackingStore, consistencyManager.getQueryRecords, true) | ||
| 417 | |||
| 418 | query.addProcessingTime(Step.FRAGMENT, timer.duration()) | ||
| 419 | |||
| 420 | val numOfABoxAxioms: Int = relevantOntologySubset.getABoxAxioms(Imports.INCLUDED).size | ||
| 421 | val numOfTBoxAxioms: Int = relevantOntologySubset.getAxiomCount() - numOfABoxAxioms | ||
| 422 | Utility logInfo s"Relevant ontology-subset has been extracted: |ABox|=$numOfABoxAxioms, |TBox|=$numOfTBoxAxioms" | ||
| 423 | |||
| 424 | return relevantOntologySubset | ||
| 425 | } | ||
| 426 | |||
  /** Queries the Skolemised ontology subset relevant to a query record.
    *
    * Materialisation is repeated with increasing maximum term depth
    * until the query is fully answered, the configured depth limit is
    * reached, the store size stabilises, or an extrapolation of the
    * last two store sizes predicts the configured triple limit would
    * be exceeded.
    *
    * @param relevantSubset the relevant ontology subset.
    * @param query the query to be answered.
    * @return true if the query has been fully answered.
    *
    * TODO: the code has been adapted from [[uk.ac.ox.cs.pagoda.reasoner.MyQueryReasoner]]
    * and ported to Scala. There are better, more Scala-esque ways to
    * deal with the big `while` in this function, but this should work
    * for now.
    */
  private def querySkolemisedRelevantSubset(
      relevantSubset: OWLOntology,
      query: QueryRecord
  ): Boolean = {
    Utility logInfo ">> Semi-Skolemisation <<"
    timer.reset()

    val relevantProgram: DatalogProgram = new DatalogProgram(relevantSubset)
    val relevantStore: MultiStageQueryEngine =
      new MultiStageQueryEngine("Relevant-store", true)
    relevantStore importDataFromABoxOf relevantSubset
    // Mark the original (non-Skolem) individuals before materialisation.
    val relevantOriginalMarkProgram: String =
      OWLHelper getOriginalMarkProgram relevantSubset
    relevantStore.materialise("Mark original individuals", relevantOriginalMarkProgram)

    var isFullyProcessed = false
    // (depth, storeSize) pairs for the last two iterations, used to
    // extrapolate the triple count of the next iteration.
    val lastTwoTriplesCounts: LinkedList[Tuple[Long]] = new LinkedList()
    var currentMaxTermDepth = 1
    var keepGoing = true
    while (!isFullyProcessed && keepGoing) {
      if (currentMaxTermDepth > properties.getSkolemDepth) {
        Utility logInfo "Maximum term depth reached"
        keepGoing = false
      } else if (
        // Stop when the store size did not change between the last two
        // iterations, or when an exponential interpolation of the last two
        // sizes predicts the next iteration would exceed the triple limit.
        lastTwoTriplesCounts.size() == 2 && (
          lastTwoTriplesCounts.get(0).get(1).equals(lastTwoTriplesCounts.get(1).get(1)) ||
          {
            val interpolation: ExponentialInterpolation =
              new ExponentialInterpolation(
                lastTwoTriplesCounts.get(0).get(0),
                lastTwoTriplesCounts.get(0).get(1),
                lastTwoTriplesCounts.get(1).get(0),
                lastTwoTriplesCounts.get(1).get(1)
              )
            val triplesEstimate: Double =
              interpolation computeValue currentMaxTermDepth
            Utility logDebug s"Estimate of the number of triples: $triplesEstimate"
            if (triplesEstimate > properties.getMaxTriplesInSkolemStore)
              Utility logInfo "Interrupting Semi-Skolemisation because of triples count limit"
            triplesEstimate > properties.getMaxTriplesInSkolemStore
          }
        )
      ) {
        keepGoing = false
      } else {
        Utility logInfo s"Trying with maximum depth $currentMaxTermDepth"

        val materialisationTag: Int =
          relevantStore.materialiseSkolemly(relevantProgram, null, currentMaxTermDepth)
        query.addProcessingTime(Step.SKOLEM_UPPER_BOUND, timer.duration())
        if (materialisationTag == -1) {
          relevantStore.dispose()
          throw new Error("A consistent ontology has turned out to be inconsistent in the Skolemises-relevant-upper-store")
        }

        if (materialisationTag != 1) {
          Utility logInfo "Semi-Skolemised relevant upper store cannot be employed"
          keepGoing = false
        } else {
          Utility logInfo "Querying semi-Skolemised upper store..."
          isFullyProcessed = queryUpperStore(
            relevantStore, query, query.getExtendedQueryText(), Step.SKOLEM_UPPER_BOUND
          )

          try {
            // Record the store size for this depth and keep only the last two.
            lastTwoTriplesCounts.add(new Tuple(currentMaxTermDepth, relevantStore.getStoreSize))
            if (lastTwoTriplesCounts.size() > 2)
              lastTwoTriplesCounts.remove()
            Utility logDebug s"Last two triples counts: $lastTwoTriplesCounts"
            currentMaxTermDepth += 1
          } catch {
            case e: JRDFStoreException => {
              e.printStackTrace()
              keepGoing = false
            }
          }
        }
      }
    }

    relevantStore.dispose()
    Utility logInfo "Semi-Skolemised relevant upper store has been evaluated"
    isFullyProcessed
  }
| 522 | |||
| 523 | /** Consistency status of the ontology */ | ||
| 524 | private sealed trait ConsistencyStatus { | ||
| 525 | val asBoolean = false | ||
| 526 | } | ||
| 527 | private case object StatusConsistent extends ConsistencyStatus { | ||
| 528 | override val asBoolean = true | ||
| 529 | override def toString(): String = "consistent" | ||
| 530 | } | ||
| 531 | private case object StatusInconsistent extends ConsistencyStatus { | ||
| 532 | override def toString(): String = "inconsistent" | ||
| 533 | } | ||
| 534 | private case object StatusUnchecked extends ConsistencyStatus { | ||
| 535 | override def toString(): String = "N/A" | ||
| 536 | } | ||
| 537 | private implicit def boolean2consistencyStatus(b: Boolean): ConsistencyStatus = { | ||
| 538 | if (b) StatusConsistent else StatusInconsistent | ||
| 539 | } | ||
| 540 | |||
| 541 | } | ||
diff --git a/src/main/scala/uk/ac/ox/cs/acqua/reasoner/ConsistencyManager.scala b/src/main/scala/uk/ac/ox/cs/acqua/reasoner/ConsistencyManager.scala new file mode 100644 index 0000000..4b73f88 --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/acqua/reasoner/ConsistencyManager.scala | |||
| @@ -0,0 +1,336 @@ | |||
| 1 | package uk.ac.ox.cs.acqua.reasoner | ||
| 2 | |||
| 3 | import java.util.LinkedList | ||
| 4 | import scala.collection.JavaConverters._ | ||
| 5 | |||
| 6 | import org.semanticweb.HermiT.model.{ | ||
| 7 | Atom, | ||
| 8 | AtomicConcept, | ||
| 9 | DLClause, | ||
| 10 | Variable | ||
| 11 | } | ||
| 12 | import org.semanticweb.owlapi.model.{ | ||
| 13 | OWLOntology, | ||
| 14 | OWLOntologyCreationException, | ||
| 15 | OWLOntologyManager | ||
| 16 | } | ||
| 17 | import uk.ac.ox.cs.JRDFox.JRDFStoreException | ||
| 18 | // import uk.ac.ox.cs.JRDFox.store.DataStore; | ||
| 19 | import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType | ||
| 20 | import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper | ||
| 21 | import uk.ac.ox.cs.pagoda.query.{ | ||
| 22 | AnswerTuples, | ||
| 23 | QueryRecord | ||
| 24 | } | ||
| 25 | // import uk.ac.ox.cs.pagoda.query.QueryManager; | ||
| 26 | import uk.ac.ox.cs.pagoda.reasoner.full.Checker | ||
| 27 | import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine | ||
| 28 | // import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; | ||
| 29 | import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter | ||
| 30 | import uk.ac.ox.cs.pagoda.tracking.QueryTracker | ||
| 31 | // import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder; | ||
| 32 | import uk.ac.ox.cs.pagoda.util.{ | ||
| 33 | Timer, | ||
| 34 | Utility | ||
| 35 | } | ||
| 36 | import uk.ac.ox.cs.pagoda.util.disposable.{ | ||
| 37 | Disposable, | ||
| 38 | DisposedException | ||
| 39 | } | ||
| 40 | // import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; | ||
| 41 | |||
/** Consistency checker inspired by [[uk.ac.ox.cs.pagoda.reasoner.ConsistencyManager]].
  *
  * The entry points are [[checkRLLowerBound]] (which must be evaluated
  * first, as it creates the full bottom-query record used by the other
  * checks), [[checkELLowerBound]], [[checkUpper]] and [[check]].
  *
  * @param reasoner an [[AcquaQueryReasoner]] instance
  *
  * TODO: rework the code to be more Scala-esque.
  */
class ConsistencyManager(
    protected val reasoner: AcquaQueryReasoner
) extends Disposable {

  protected val queryManager = reasoner.getQueryManager
  private val timer = new Timer()
  // True once `extractBottomFragment` has run; makes it idempotent.
  private var fragmentExtracted = false

  // Record for the full bottom query; populated by `checkRLLowerBound`.
  private var fullQueryRecord: Option[QueryRecord] = None
  // One record per distinct bottom fragment; see `extractBottomFragment`.
  private var botQueryRecords = Array.empty[QueryRecord]
  // Clauses mapping the numbered `Nothing` concepts onto the final fragments.
  private var toAddClauses = new LinkedList[DLClause]()

  override def dispose(): Unit = {
    super.dispose()
    // `foreach` (not `map`) since this is performed purely for the side effect.
    fullQueryRecord.foreach(_.dispose())
  }

  /** Extracts the bottom fragments of the ontology.
    *
    * Evaluates the numbered bottom queries against the tracking store,
    * groups those with the same gap answers into fragments, rewrites
    * the surviving records to the `Nothing_finalN` numbering, updates
    * the upper program's dependency graph, and finally materialises the
    * tracking programs into the tracking store to extract the relevant
    * axioms per fragment. Subsequent calls are no-ops.
    */
  def extractBottomFragment(): Unit = {
    if (isDisposed) throw new DisposedException
    if (!fragmentExtracted) {
      fragmentExtracted = true

      val upperProgram = reasoner.datalog.getUpper
      val number = upperProgram.getBottomNumber

      if (number <= 1) {
        // Single bottom: reuse the record built by the RL lower-bound check.
        botQueryRecords = Array[QueryRecord](fullQueryRecord.get)
      } else {
        var record: QueryRecord = null
        // `val`: the array is filled in place, never reassigned.
        val tempQueryRecords = new Array[QueryRecord](number - 1)
        for (i <- 0 until (number - 1)) {
          record = queryManager.create(QueryRecord.botQueryText.replace("Nothing", s"Nothing${i+1}"), 0, i + 1)
          tempQueryRecords(i) = record
          var iter: AnswerTuples = null
          try {
            iter = reasoner.trackingStore.evaluate(record.getQueryText, record.getAnswerVariables)
            record updateUpperBoundAnswers iter
          } finally {
            if (iter != null) iter.dispose()
          }
        }

        /* Union-find-like grouping: `group(i)` holds the representative
         * index of the fragment record i belongs to. */
        var bottomNumber = 0
        val group = (0 until (number - 1)).toArray
        for (i <- 0 until (number - 1))
          if (tempQueryRecords(i).isProcessed)
            tempQueryRecords(i).dispose()
          else if (group(i) == i) {
            bottomNumber += 1
            record = tempQueryRecords(i)
            for (j <- i until (number - 1))
              if (record hasSameGapAnswers tempQueryRecords(j))
                group(j) = i
          }

        Utility logInfo s"There are $bottomNumber different bottom fragments."
        toAddClauses = new LinkedList[DLClause]()
        var bottomCounter = 0
        botQueryRecords = new Array[QueryRecord](bottomNumber)
        val X = Variable.create("X")
        for (i <- 0 until (number - 1)) {
          if (!tempQueryRecords(i).isDisposed() && !tempQueryRecords(i).isProcessed()) {
            if (group(i) == i) {
              // Representative of its group: keep the record and rename its
              // bottom concept to the final numbering.
              record = tempQueryRecords(i)
              botQueryRecords(bottomCounter) = record
              bottomCounter += 1
              group(i) = bottomCounter
              record.resetInfo(
                QueryRecord.botQueryText.replace(
                  "Nothing",
                  s"Nothing_final$bottomCounter"
                ), 0, bottomCounter)
              toAddClauses.add(
                DLClause.create(
                  Array[Atom](Atom.create(AtomicConcept.create(s"${AtomicConcept.NOTHING.getIRI}_final$bottomCounter"), X)),
                  Array[Atom](Atom.create(AtomicConcept.create(s"${AtomicConcept.NOTHING.getIRI}${i+1}"), X))
                )
              )
            } else {
              // Member of an existing group: only add the bridging clause,
              // then drop the redundant record.
              toAddClauses.add(
                DLClause.create(
                  Array[Atom](Atom.create(AtomicConcept.create(s"${AtomicConcept.NOTHING.getIRI}_final${group(group(i))}"), X)),
                  Array[Atom](Atom.create(AtomicConcept.create(s"${AtomicConcept.NOTHING.getIRI}${i+1}"), X))
                )
              )
              tempQueryRecords(i).dispose()
            }
          }
        }
        upperProgram updateDependencyGraph toAddClauses
      }

      /* Materialise the tracking programs in the tracking store, then
       * extract the relevant axioms for each bottom fragment. */
      val programs: Array[String] = collectTrackingProgramAndImport()
      if (programs.length > 0) {
        val datastore = reasoner.trackingStore.getDataStore
        var oldTripleCount: Long = 0
        var tripleCount: Long = 0
        try {
          val t1 = new Timer()
          oldTripleCount = datastore.getTriplesCount
          for (program <- programs)
            datastore.importRules(program, UpdateType.ScheduleForAddition)
          datastore applyReasoning true
          tripleCount = datastore.getTriplesCount

          Utility logDebug s"tracking store after materialising tracking program: $tripleCount (${tripleCount - oldTripleCount} new)"
          Utility logDebug s"tracking store finished the materialisation of tracking program in ${t1.duration()} seconds."

          extractAxioms()
          datastore.clearRulesAndMakeFactsExplicit()
        } catch {
          // NOTE(review): failures here are logged and swallowed, possibly
          // leaving the store partially updated; kept to match PAGOdA.
          case e: JRDFStoreException => e.printStackTrace()
          case e: OWLOntologyCreationException => e.printStackTrace()
        }
      }
    }
  }

  /** The query records for the bottom fragments.
    *
    * @note provided for compatibility reasons.
    *
    * This must be a `def`, not a `val`: `botQueryRecords` is reassigned
    * by [[extractBottomFragment]], and a `val` would permanently capture
    * the empty array this field holds at construction time.
    */
  def getQueryRecords: Array[QueryRecord] = botQueryRecords

  /** RL lower bound check for satisfiability.
    *
    * Creates the full bottom-query record (stored in `fullQueryRecord`,
    * and required by the other checks) and evaluates it against the RL
    * lower store.
    */
  lazy val checkRLLowerBound: Boolean = {
    if (isDisposed) throw new DisposedException
    val record: QueryRecord = queryManager.create(QueryRecord.botQueryText, 0)
    fullQueryRecord = Some(record)

    var iter: AnswerTuples = null
    try {
      iter = reasoner.rlLowerStore.evaluate(record.getQueryText, record.getAnswerVariables)
      record updateLowerBoundAnswers iter
    } finally {
      // Guard against a failed `evaluate` leaving `iter` null; previously
      // this finaliser would itself have thrown a NullPointerException.
      if (iter != null) iter.dispose()
    }

    if (record.getNoOfSoundAnswers > 0) {
      Utility logInfo s"Answers to bottom in the lower bound: ${record.outputSoundAnswerTuple}"
    }
    record.getNoOfSoundAnswers <= 0
  }

  /** ELHO lower bound check for satisfiability.
    *
    * @note requires [[checkRLLowerBound]] to have been evaluated first,
    *       since it reads `fullQueryRecord`.
    */
  lazy val checkELLowerBound: Boolean = {
    if (isDisposed) throw new DisposedException
    val record: QueryRecord = fullQueryRecord.get

    var answers: AnswerTuples = null
    try {
      answers = reasoner.elLowerStore.evaluate(
        record.getQueryText,
        record.getAnswerVariables
      )
      record updateLowerBoundAnswers answers
    } finally {
      // Dispose the tuples once consumed, matching every other call site
      // (previously they were leaked).
      if (answers != null) answers.dispose()
    }

    if (record.getNoOfSoundAnswers > 0) {
      Utility logInfo s"Answers to bottom in the lower bound: ${record.outputSoundAnswerTuple}"
    }
    record.getNoOfSoundAnswers <= 0
  }

  /** Checks satisfiability against a materialised upper store.
    *
    * @param upperStore the store to query; a `null` store is skipped.
    * @return true if the store proves satisfiability (no tuples for the
    *         bottom query); false if the store is null or inconclusive.
    */
  def checkUpper(upperStore: BasicQueryEngine): Boolean = {
    if (isDisposed) throw new DisposedException
    val record = fullQueryRecord.get

    if (upperStore != null) {
      var tuples: AnswerTuples = null
      try {
        tuples = upperStore.evaluate(record.getQueryText, record.getAnswerVariables)
        if (!tuples.isValid) {
          Utility logInfo s"There are no contradictions derived in ${upperStore.getName} materialisation."
          Utility logDebug "The ontology and dataset is satisfiable."
          return true
        }
      } finally {
        if (tuples != null) tuples.dispose()
      }
    }
    false
  }

  /** True if the KB associated with the [[reasoner]] is consistent.
    *
    * This is the main entry point of the consistency checker.
    *
    * @note requires [[checkRLLowerBound]] to have been evaluated first,
    *       since it reads `fullQueryRecord`.
    */
  lazy val check: Boolean = {
    if (isDisposed) throw new DisposedException
    val record = fullQueryRecord.get
    var tuples: AnswerTuples = null

    try {
      tuples = reasoner.trackingStore.evaluate(
        record.getQueryText,
        record.getAnswerVariables
      )
      record updateUpperBoundAnswers tuples
    } finally {
      if (tuples != null) tuples.dispose()
    }

    var satisfiability = true
    if (record.getNoOfCompleteAnswers != 0) {
      /* The upper bound derives bottom: run a full check on each bottom
       * fragment with HermiT. */
      extractBottomFragment()

      try {
        extractAxioms4Full()
      } catch {
        case e: OWLOntologyCreationException => e.printStackTrace()
      }

      var checker: Checker = null
      for (r <- getQueryRecords) {
        checker = new HermitSummaryFilter(r, true)
        // NOTE(review): only the last fragment's result is kept; consider
        // stopping at the first inconsistency — confirm against PAGOdA.
        satisfiability = checker.isConsistent()
        checker.dispose()
      }
    }
    satisfiability
  }

  /** Merges the relevant clauses and axioms of every bottom fragment
    * into the full query record's relevant ontology.
    */
  private def extractAxioms4Full(): Unit = {
    val manager: OWLOntologyManager =
      reasoner.encoder.get
        .getProgram
        .getOntology
        .getOWLOntologyManager
    val fullOntology: OWLOntology = manager.createOntology()
    for (record <- botQueryRecords) {
      for (clause <- record.getRelevantClauses.asScala) {
        fullQueryRecord.get addRelevantClauses clause
      }
      manager.addAxioms(
        fullOntology,
        record.getRelevantOntology.getAxioms()
      )
    }
    fullQueryRecord.get.setRelevantOntology(fullOntology)
  }

  /** Extracts, for each bottom fragment, the axioms relevant to its
    * query record from the tracking store.
    */
  private def extractAxioms(): Unit = {
    val manager: OWLOntologyManager =
      reasoner.encoder.get
        .getProgram
        .getOntology
        .getOWLOntologyManager
    for (record <- botQueryRecords) {
      record setRelevantOntology manager.createOntology()
      val tracker: QueryTracker = new QueryTracker(reasoner.encoder.get, reasoner.rlLowerStore, record)
      reasoner.encoder.get setCurrentQuery record
      tracker extractAxioms reasoner.trackingStore
      Utility logInfo s"finish extracting axioms for bottom ${record.getQueryID}"
    }
  }

  /** Builds the tracking program for each bottom query record.
    *
    * Each program is the encoder's tracking program for the record,
    * extended with the clauses from `toAddClauses` relative to that
    * fragment (those whose head predicate mentions `_final<i+1>`).
    *
    * @return one datalog program (as text) per bottom query record.
    *
    * Note: the previous version also allocated a `programs` array and an
    * outer `currentClauses` list that were never used; both removed.
    */
  private def collectTrackingProgramAndImport(): Array[String] = {
    val encoder = reasoner.encoder.get
    botQueryRecords.zipWithIndex map { case (record, i) =>
      encoder setCurrentQuery record
      val builder = new StringBuilder(encoder.getTrackingProgram)
      val currentClauses = toAddClauses.asScala.filter { clause =>
        clause.getHeadAtom(0).getDLPredicate.toString().contains(s"_final${i + 1}")
      }.asJava
      builder append (DLClauseHelper toString currentClauses)
      builder.toString
    }
  }
}
diff --git a/src/main/scala/uk/ac/ox/cs/acqua/reasoner/RSACombQueryReasoner.scala b/src/main/scala/uk/ac/ox/cs/acqua/reasoner/RSACombQueryReasoner.scala new file mode 100644 index 0000000..5acc7cd --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/acqua/reasoner/RSACombQueryReasoner.scala | |||
| @@ -0,0 +1,130 @@ | |||
| 1 | /* | ||
| 2 | * Copyright 2021,2022 KRR Oxford | ||
| 3 | * | ||
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | * you may not use this file except in compliance with the License. | ||
| 6 | * You may obtain a copy of the License at | ||
| 7 | * | ||
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | * | ||
| 10 | * Unless required by applicable law or agreed to in writing, software | ||
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | * See the License for the specific language governing permissions and | ||
| 14 | * limitations under the License. | ||
| 15 | */ | ||
| 16 | |||
| 17 | package uk.ac.ox.cs.acqua.reasoner | ||
| 18 | |||
| 19 | import java.util.Collection; | ||
| 20 | import scala.collection.JavaConverters._ | ||
| 21 | |||
| 22 | import org.semanticweb.owlapi.model.OWLOntology | ||
| 23 | import uk.ac.ox.cs.rsacomb.approximation.{Approximation,Lowerbound} | ||
| 24 | import uk.ac.ox.cs.rsacomb.ontology.{Ontology,RSAOntology} | ||
| 25 | import uk.ac.ox.cs.pagoda.query.QueryRecord | ||
| 26 | import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner | ||
| 27 | import uk.ac.ox.cs.acqua.approximation.Noop | ||
| 28 | |||
/** A [[QueryReasoner]] backed by RSAComb.
  *
  * The input ontology is approximated to RSA (according to [[toRSA]])
  * and queries are answered through RSAComb's combined approach.
  *
  * @param origin the input ontology.
  * @param toRSA the approximation used to obtain an RSA ontology from
  *        `origin`; defaults to [[Noop]], i.e., standalone use on an
  *        ontology that is already RSA.
  */
class RSACombQueryReasoner(
  val origin: Ontology,
  val toRSA: Approximation[RSAOntology] = Noop
) extends QueryReasoner {

  /* Implicit compatibility between PAGOdA and RSAComb types */
  import uk.ac.ox.cs.acqua.implicits.PagodaConverters._

  /** RSA approximation of [[origin]]. */
  val rsa: RSAOntology = origin.approximate(toRSA)

  /** Does nothing: the ontology is provided at construction time.
    *
    * @note Implemented for compatibility with other reasoners.
    */
  def loadOntology(ontology: OWLOntology): Unit = ()

  /** Forces computation of the canonical model for the combined approach.
    *
    * @return whether the original ontology is RSA.
    *
    * @note calling this method is optional: the preprocessing is
    *       performed "on demand" when evaluating a query.
    */
  def preprocess(): Boolean = {
    rsa.computeCanonicalModel()
    origin.isRSA
  }

  /** Checks consistency and returns whether the ontology is RSA.
    *
    * Preprocessing is performed on instance creation, along with
    * consistency checking, so no actual work is done here.
    *
    * @note Implemented for compatibility with other reasoners.
    */
  def isConsistent(): Boolean = origin.isRSA

  /** Evaluates a collection of queries.
    *
    * Uses RSAComb internally to reuse part of the computation of
    * multiple calls to [[uk.ac.ox.cs.rsacomb.RSAOntology.ask]].
    *
    * TODO: perform logging of answers
    */
  //override def evaluate(queries: Collection[QueryRecord]): Unit = {
  //  val answers = rsa ask queries
  //  /* Perform logging */
  //  // Logger write answers
  //  // Logger.generateSimulationScripts(datapath, queries)
  //}

  /** Evaluates a single query, storing the result as a lower bound.
    *
    * Uses RSAComb internally to reuse part of the computation of
    * multiple calls to [[uk.ac.ox.cs.rsacomb.RSAOntology.ask]].
    *
    * TODO: perform logging of answers
    */
  def evaluate(query: QueryRecord): Unit = {
    import uk.ac.ox.cs.acqua.implicits.RSACombAnswerTuples._
    val answers = rsa.ask(query)
    query.updateLowerBoundAnswers(answers)
    if (toRSA == Noop) {
      /* Standalone mode: this is where query answers and other related
       * logging routines should be printed out.
       */
      //Logger write answers
      //Logger.generateSimulationScripts(datapath, queries)
    }
  }

  /** Evaluates a single query, storing the result as an upper bound.
    *
    * Uses RSAComb internally to reuse part of the computation of
    * multiple calls to [[uk.ac.ox.cs.rsacomb.RSAOntology.ask]].
    *
    * @note the result of the computation is saved in the "upper bound"
    *       of the input query record.
    *
    * TODO: perform logging of answers
    */
  def evaluateUpper(query: QueryRecord): Unit = {
    import uk.ac.ox.cs.acqua.implicits.RSACombAnswerTuples._
    val answers = rsa.ask(query)
    query.updateUpperBoundAnswers(answers)
    if (toRSA == Noop) {
      /* Standalone mode: this is where query answers and other related
       * logging routines should be printed out.
       */
      //Logger write answers
      //Logger.generateSimulationScripts(datapath, queries)
    }
  }
}
diff --git a/src/main/scala/uk/ac/ox/cs/acqua/util/AcquaConfig.scala b/src/main/scala/uk/ac/ox/cs/acqua/util/AcquaConfig.scala new file mode 100644 index 0000000..5417b77 --- /dev/null +++ b/src/main/scala/uk/ac/ox/cs/acqua/util/AcquaConfig.scala | |||
| @@ -0,0 +1,207 @@ | |||
| 1 | /* | ||
| 2 | * Copyright 2021,2022 KRR Oxford | ||
| 3 | * | ||
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | * you may not use this file except in compliance with the License. | ||
| 6 | * You may obtain a copy of the License at | ||
| 7 | * | ||
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | * | ||
| 10 | * Unless required by applicable law or agreed to in writing, software | ||
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | * See the License for the specific language governing permissions and | ||
| 14 | * limitations under the License. | ||
| 15 | */ | ||
| 16 | |||
| 17 | package uk.ac.ox.cs.acqua.util | ||
| 18 | |||
| 19 | import scala.collection.mutable.Map | ||
| 20 | import uk.ac.ox.cs.rsacomb.util.Logger | ||
| 21 | |||
/** Wrapper for a configuration value of arbitrary type.
  *
  * The stored value is retrieved with [[get]], which casts it to the
  * type requested by the caller (e.g. `config('ontology).get[os.Path]`).
  *
  * @param opt the wrapped value.
  */
case class AcquaOption[+T](opt: T) {
  /** Retrieves the wrapped value, cast to the requested type.
    *
    * The type parameter is named `A` rather than `T`: it previously
    * shadowed the class's own `T`, which made the unchecked cast easy
    * to misread as a safe identity conversion. Behaviour is unchanged.
    *
    * @tparam A the type expected by the caller; the cast is unchecked,
    *         so a wrong choice surfaces as a [[ClassCastException]] at
    *         the use site.
    */
  def get[A]: A = opt.asInstanceOf[A]
}
| 25 | |||
| 26 | /** Command line options for ACQuA | ||
| 27 | * | ||
| 28 | * TODO: integrate with PAGOdA's Logger. | ||
| 29 | * The ideal situation would be to add a Logger interface to PAGOdA and | ||
| 30 | * make everything dependent on it so that this Logger can be | ||
| 31 | * implemented as an instance of the interface. | ||
| 32 | */ | ||
| 33 | object AcquaConfig { | ||
  /** A configuration: a mutable map from option name to wrapped value. */
  type Config = Map[Symbol, AcquaOption[Any]]
| 35 | |||
| 36 | private implicit def toRSAOption[T](opt: T) = AcquaOption[T](opt) | ||
| 37 | |||
  /** Help message printed for `-h`/`-?`/`--help` and on argument errors. */
  private val help: String = """

  acqua - conjunctive query answering over unrestricted ontologies

  USAGE
    acqua [OPTIONS]

    -h | -? | --help
        print this help message

    -l | --logger <level>
        specify the logger verbosity. Values are: quiet, normal (default),
        debug, verbose.

    -a | --answers <file>
        path to the output file for the answers to the query (in JSON
        format)

    -q | --queries <file>
        path to a file containing a single SPARQL query. If no query
        is provided, only the approximation to RSA will be performed.

    -o | --ontology <file>
        ontology file in OWL format.

    -d | --data <file>
        data files to be used alongside the ontology file. If a
        directory is provided, all files in the directory (recursively)
        will be considered.

    -t | --transitive
        "upperbound" approximation specific option. Include property chain
        axioms (and hence the more common transitive properties) when
        computing the canonical model.

  """
| 75 | |||
  /** Default config values.
    *
    * NOTE(review): this is a mutable `Map` shared by reference; any code
    * that mutates a config derived from it should copy it first.
    */
  private val default: Config = Map(
    'transitive -> false,
    'data -> List.empty[os.Path],
  )
| 81 | |||
  /** Parse a string into a path.
    *
    * Relative paths are resolved against the current working directory.
    *
    * @param str the string to parse.
    * @return the parsed [[os.Path]]. Note that a malformed path does
    *         not surface as an exception: the [[IllegalArgumentException]]
    *         is caught and the program terminates via [[exit]].
    */
  private def getPath(str: String): os.Path =
    try {
      os.Path(str, base = os.pwd)
    } catch {
      case e: IllegalArgumentException =>
        exit(s"'$str' is not a well formed path.")
    }
| 93 | |||
| 94 | /** Utility to exit the program with a custom message on stderr. | ||
| 95 | * | ||
| 96 | * The program will exit with error after printing the help message. | ||
| 97 | * | ||
| 98 | * @param msg message printed to stderr. | ||
| 99 | * @param errno error code number (defaults to 1) | ||
| 100 | */ | ||
| 101 | private def exit(msg: String, errno: Int = 1): Nothing = { | ||
| 102 | System.err.println(msg) | ||
| 103 | System.err.println(help) | ||
| 104 | sys.exit(errno) | ||
| 105 | } | ||
| 106 | |||
| 107 | /** Parse arguments with default options. | ||
| 108 | * | ||
| 109 | * @param args arguments list | ||
| 110 | * @return map of config options | ||
| 111 | */ | ||
| 112 | def parse(args: List[String]): Config = parse(args, default) | ||
| 113 | |||
  /** Parse arguments.
    *
    * Recursively consumes the argument list, folding each recognised
    * flag into the configuration map and aborting (via [[exit]]) on
    * any unrecognised or malformed sequence. An empty list triggers
    * the final validation step in [[finalise]].
    *
    * NOTE(review): `config += ...` implies Config supports in-place
    * accumulation (mutable map or a custom `+=`); confirm that
    * threading its result through the recursion is the intended style.
    *
    * @param args arguments list
    * @param config default configuration
    * @return map of config options
    */
  def parse(args: List[String], config: Config): Config = {
    args match {
      case Nil => finalise(config)
      // Print the help message and terminate successfully.
      case flag @ ("-h" | "-?" | "--help") :: _ => {
        println(help)
        sys.exit(0)
      }
      // Logger verbosity. Any unrecognised level string silently
      // falls back to NORMAL (the documented default) instead of
      // being reported as an error.
      case flag @ ("-l" | "--logger") :: _level :: tail => {
        val level = _level match {
          case "quiet" => Logger.QUIET
          case "debug" => Logger.DEBUG
          case "verbose" => Logger.VERBOSE
          case _ => Logger.NORMAL
        }
        parse(tail, config += ('logger -> level))
      }
      // Output file for the query answers (JSON).
      case flag @ ("-a" | --answers") :: answers :: tail =>
        parse(tail, config += ('answers -> getPath(answers)))
      // Boolean switch: include property chain axioms in the
      // canonical model computation.
      case flag @ ("-t" | "--transitive") :: tail =>
        parse(tail, config += ('transitive -> true))
      // A single query file, or a directory walked recursively for
      // query files; anything else is a fatal error.
      case flag @ ("-q" | "--queries") :: _query :: tail => {
        val query = getPath(_query)
        val files =
          if (os.isFile(query))
            List(query)
          else if (os.isDir(query))
            os.walk(query).filter(os.isFile).toList
          else
            exit(s"'${_query}' is not a valid path.")
        parse(tail, config += ('queries -> files))
      }
      // Ontology file. Mandatory overall, but that is only enforced
      // once the whole argument list has been consumed (finalise).
      case flag @ ("-o" | "--ontology") :: _ontology :: tail => {
        val ontology = getPath(_ontology)
        if (!os.isFile(ontology))
          exit(s"'${_ontology}' is not a valid filename.")
        parse(tail, config += ('ontology -> ontology))
      }
      // Data files: a single file or a directory walked recursively.
      // NOTE(review): 'pagodata stores the raw input String, unlike
      // every other path-valued entry (os.Path) — confirm downstream
      // consumers (PAGOdA side) really expect a String here.
      case flag @ ("-d" | "--data") :: _data :: tail => {
        val data = getPath(_data)
        val files =
          if (os.isFile(data))
            List(data)
          else if (os.isDir(data)) {
            os.walk(data).filter(os.isFile).toList
          }else
            exit(s"'${_data}' is not a valid path.")
        parse(tail, config += ('data -> files) += ('pagodata -> _data))
      }
      case a => exit(s"Invalid sequence of arguments '${a.mkString(" ")}'.")
    }
  }
| 171 | |||
| 172 | /** Perform final checks on parsed options. | ||
| 173 | * | ||
| 174 | * @param config a parsed configuration | ||
| 175 | * @returns the input configuration, unchanged | ||
| 176 | */ | ||
| 177 | private def finalise(config: Config): Config = { | ||
| 178 | if (!config.contains('ontology)) | ||
| 179 | exit("The following flag is mandatory: '-o' or '--ontology'.") | ||
| 180 | config | ||
| 181 | } | ||
| 182 | |||
| 183 | /** Generate summary of a config object suitable for printing | ||
| 184 | * | ||
| 185 | * @param config a parsed configuration | ||
| 186 | * @returns a string describing the configuration | ||
| 187 | */ | ||
| 188 | def describe(config: Config): Unit = { | ||
| 189 | config foreach { case (k,v) => k match { | ||
| 190 | case 'logger => Logger print s"Logger level: ${v.get[Logger.Level]}" | ||
| 191 | case 'ontology => Logger print s"Ontology file: ${v.get[os.Path]}" | ||
| 192 | case 'data => { | ||
| 193 | val paths = v.get[List[os.Path]] | ||
| 194 | val ellipsis = if (paths.length > 1) " [...]" else "" | ||
| 195 | Logger print s"Data files: ${paths.headOption.getOrElse("NONE")}$ellipsis" | ||
| 196 | } | ||
| 197 | case 'queries => { | ||
| 198 | val paths = v.get[List[os.Path]] | ||
| 199 | val ellipsis = if (paths.length > 1) " [...]" else "" | ||
| 200 | Logger print s"Query files: ${paths.headOption.getOrElse("NONE")}$ellipsis" | ||
| 201 | } | ||
| 202 | case 'answers => Logger print s"Path to answers: ${v.get[os.Path]}" | ||
| 203 | case 'transitive => Logger print s"Include property chain axioms: ${v.get[Boolean]}" | ||
| 204 | case _ => { } | ||
| 205 | }} | ||
| 206 | } | ||
| 207 | } | ||
