path: root/src/main/scala/uk/ac/ox/cs/acqua
author    Federico Igne <federico.igne@cs.ox.ac.uk>  2022-05-12 18:30:37 +0100
committer Federico Igne <federico.igne@cs.ox.ac.uk>  2022-05-12 18:31:01 +0100
commit    debbced3fbcbfd4eeac55ce852213a2286ab0aab (patch)
tree      31c167a8f0d1df3957a27dbd200c945f2ef13d95 /src/main/scala/uk/ac/ox/cs/acqua
parent    076e470e620b3e80a48438ea59f17775dcc88f9d (diff)
download  ACQuA-debbced3fbcbfd4eeac55ce852213a2286ab0aab.tar.gz
          ACQuA-debbced3fbcbfd4eeac55ce852213a2286ab0aab.zip
Add preprocessing and initial computation for CQ answering
Diffstat (limited to 'src/main/scala/uk/ac/ox/cs/acqua')
-rw-r--r--  src/main/scala/uk/ac/ox/cs/acqua/reasoner/AcquaQueryReasoner.scala | 282
1 file changed, 138 insertions, 144 deletions
diff --git a/src/main/scala/uk/ac/ox/cs/acqua/reasoner/AcquaQueryReasoner.scala b/src/main/scala/uk/ac/ox/cs/acqua/reasoner/AcquaQueryReasoner.scala
index 3e65218..4fe32d8 100644
--- a/src/main/scala/uk/ac/ox/cs/acqua/reasoner/AcquaQueryReasoner.scala
+++ b/src/main/scala/uk/ac/ox/cs/acqua/reasoner/AcquaQueryReasoner.scala
@@ -16,50 +16,53 @@
 
 package uk.ac.ox.cs.acqua.reasoner
 
-import org.semanticweb.karma2.profile.ELHOProfile;
-import org.semanticweb.owlapi.model.OWLOntology;
+import scala.collection.JavaConverters._
+import org.semanticweb.karma2.profile.ELHOProfile
+import org.semanticweb.owlapi.model.OWLOntology
 // import org.semanticweb.owlapi.model.parameters.Imports;
 // import uk.ac.ox.cs.JRDFox.JRDFStoreException;
 import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine
 // import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator;
-// import uk.ac.ox.cs.pagoda.owl.OWLHelper;
-// import uk.ac.ox.cs.pagoda.query.AnswerTuples;
-// import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
-// import uk.ac.ox.cs.pagoda.query.GapByStore4ID2;
-import uk.ac.ox.cs.pagoda.query.QueryRecord
-// import uk.ac.ox.cs.pagoda.query.QueryRecord.Step;
-import uk.ac.ox.cs.pagoda.reasoner.{ConsistencyManager,MyQueryReasoner,QueryReasoner}
+import uk.ac.ox.cs.pagoda.owl.OWLHelper
+import uk.ac.ox.cs.pagoda.query.{
+  AnswerTuples,
+  GapByStore4ID,
+  GapByStore4ID2,
+  QueryRecord,
+}
+import uk.ac.ox.cs.pagoda.query.QueryRecord.Step;
+import uk.ac.ox.cs.pagoda.reasoner.{
+  ConsistencyManager,
+  MyQueryReasoner,
+  QueryReasoner
+}
 import uk.ac.ox.cs.pagoda.reasoner.light.{KarmaQueryEngine,BasicQueryEngine}
 import uk.ac.ox.cs.pagoda.rules.DatalogProgram
 // import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter;
 // import uk.ac.ox.cs.pagoda.tracking.QueryTracker;
-// import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder;
-// import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderDisjVar1;
-// import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderWithGap;
+import uk.ac.ox.cs.pagoda.tracking.{
+  TrackingRuleEncoder,
+  TrackingRuleEncoderDisjVar1,
+  TrackingRuleEncoderWithGap,
+}
 // import uk.ac.ox.cs.pagoda.util.ExponentialInterpolation;
 // import uk.ac.ox.cs.pagoda.util.PagodaProperties;
-import uk.ac.ox.cs.pagoda.util.Timer;
+import uk.ac.ox.cs.pagoda.util.Timer
 import uk.ac.ox.cs.pagoda.util.Utility
 // import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
-// import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
+import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
 import uk.ac.ox.cs.rsacomb.ontology.Ontology
+import uk.ac.ox.cs.rsacomb.approximation.{Lowerbound,Upperbound}
 
 // import java.util.Collection;
 // import java.util.LinkedList;
 
-class AcquaQueryReasoner(var ontology: Ontology)
+class AcquaQueryReasoner(val ontology: Ontology)
   extends QueryReasoner {
 
-// OWLOntology ontology;
-// OWLOntology elho_ontology;
-// DatalogProgram program;
-
+  private var encoder: Option[TrackingRuleEncoder] = None
   private var lazyUpperStore: Option[MultiStageQueryEngine] = None;
-// TrackingRuleEncoder encoder;
-
 
-// private Collection<String> predicatesWithGap = null;
-//// private int relevantOntologiesCounter = 0;
   private val timer: Timer = new Timer();
 
   private var _isConsistent: ConsistencyStatus = StatusUnchecked
@@ -69,9 +72,13 @@ class AcquaQueryReasoner(var ontology: Ontology)
 
   private val rlLowerStore: BasicQueryEngine = new BasicQueryEngine("rl-lower-bound")
   private val elLowerStore: KarmaQueryEngine = new KarmaQueryEngine("elho-lower-bound")
+  private lazy val lowerRSAOntology = ontology approximate (new Lowerbound)
+  private lazy val upperRSAOntology = ontology approximate (new Upperbound)
 
   private val trackingStore = new MultiStageQueryEngine("tracking", false);
 
+  var predicatesWithGap: Seq[String] = Seq.empty
+
   /* Load ontology into PAGOdA */
   private val datalog = new DatalogProgram(ontology.origin);
   //datalog.getGeneral().save();
@@ -91,8 +98,80 @@ class AcquaQueryReasoner(var ontology: Ontology)
     */
   def loadOntology(ontology: OWLOntology): Unit = { }
 
+  /** Preprocessing of the input ontology.
+    *
+    * This is mostly PAGOdA related. Note that, while most of the
+    * computation in RSAComb is performed "on demand", we force the
+    * approximation of the input ontology to RSA from above/below,
+    * and the computation of the respective canonical models, to make
+    * timing measurements more consistent.
+    *
+    * @return whether the input ontology is found consistent after the
+    *         preprocessing phase.
+    */
   def preprocess(): Boolean = {
-    ???
+    timer.reset();
+    Utility logInfo "Preprocessing (and checking satisfiability)..."
+
+    val name = "data"
+    val datafile = getImportedData()
+
+    /* RL lower-bound check */
+    rlLowerStore.importRDFData(name, datafile);
+    rlLowerStore.materialise("lower program", datalog.getLower.toString);
+    if (!consistencyManager.checkRLLowerBound()) {
+      Utility logDebug s"time for satisfiability checking: ${timer.duration()}"
+      _isConsistent = StatusInconsistent
+      return false
+    }
+    Utility logDebug s"The number of 'sameAs' assertions in RL lower store: ${rlLowerStore.getSameAsNumber}"
+
+    /* ELHO lower-bound check */
+    val originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology.origin)
+    elLowerStore.importRDFData(name, datafile);
+    elLowerStore.materialise("saturate named individuals", originalMarkProgram);
+    elLowerStore.materialise("lower program", datalog.getLower.toString);
+    elLowerStore.initialiseKarma();
+    if (!consistencyManager.checkELLowerBound()) {
+      Utility logDebug s"time for satisfiability checking: ${timer.duration()}"
+      _isConsistent = StatusInconsistent
+      return false
+    }
+
+    /* Lazy upper store */
+    val tag = lazyUpperStore.map(store => {
+      store.importRDFData(name, datafile)
+      store.materialise("saturate named individuals", originalMarkProgram)
+      store.materialiseRestrictedly(datalog, null)
+    }).getOrElse(1)
+    if (tag == -1) {
+      Utility logDebug s"time for satisfiability checking: ${timer.duration()}"
+      _isConsistent = StatusInconsistent
+      return false
+    }
+    lazyUpperStore.flatMap(store => { store.dispose(); None })
+
+    trackingStore.importRDFData(name, datafile)
+    trackingStore.materialise("saturate named individuals", originalMarkProgram)
+    val gap: GapByStore4ID = new GapByStore4ID2(trackingStore, rlLowerStore);
+    trackingStore.materialiseFoldedly(datalog, gap);
+    this.predicatesWithGap = gap.getPredicatesWithGap.asScala.toSeq;
+    gap.clear();
+
+    if (datalog.getGeneral.isHorn)
+      encoder = Some(new TrackingRuleEncoderWithGap(datalog.getUpper, trackingStore))
+    else
+      encoder = Some(new TrackingRuleEncoderDisjVar1(datalog.getUpper, trackingStore))
+
+    /* Perform consistency checking if not already inconsistent */
+    if (!isConsistent()) return false
+    consistencyManager.extractBottomFragment();
+
+    /* Force computation of lower/upper RSA approximations */
+    lowerRSAOntology//.computeCanonicalModel()
+    upperRSAOntology//.computeCanonicalModel()
+
+    true
   }
 
   /** Returns the consistency status of the ontology.
@@ -112,99 +191,8 @@ class AcquaQueryReasoner(var ontology: Ontology)
   }
 
   def evaluate(query: QueryRecord): Unit = {
-    ???
-  }
-
-  def evaluateUpper(record: QueryRecord): Unit= ???
-
-// public Collection<String> getPredicatesWithGap() {
-// if(isDisposed()) throw new DisposedException();
-// return predicatesWithGap;
-// }
-
-// @Override
-// public boolean preprocess() {
-// if(isDisposed()) throw new DisposedException();
-
-// t.reset();
-// Utility.logInfo("Preprocessing (and checking satisfiability)...");
-
-// String name = "data", datafile = getImportedData();
-// rlLowerStore.importRDFData(name, datafile);
-// rlLowerStore.materialise("lower program", program.getLower().toString());
-//// program.getLower().save();
-// if(!consistency.checkRLLowerBound()) {
-// Utility.logDebug("time for satisfiability checking: " + t.duration());
-// isConsistent = ConsistencyStatus.INCONSISTENT;
-// return false;
-// }
-// Utility.logDebug("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber());
-
-// String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology);
-
-// elLowerStore.importRDFData(name, datafile);
-// elLowerStore.materialise("saturate named individuals", originalMarkProgram);
-// elLowerStore.materialise("lower program", program.getLower().toString());
-// elLowerStore.initialiseKarma();
-// if(!consistency.checkELLowerBound()) {
-// Utility.logDebug("time for satisfiability checking: " + t.duration());
-// isConsistent = ConsistencyStatus.INCONSISTENT;
-// return false;
-// }
-
-// if(lazyUpperStore != null) {
-// lazyUpperStore.importRDFData(name, datafile);
-// lazyUpperStore.materialise("saturate named individuals", originalMarkProgram);
-// int tag = lazyUpperStore.materialiseRestrictedly(program, null);
-// if(tag == -1) {
-// Utility.logDebug("time for satisfiability checking: " + t.duration());
-// isConsistent = ConsistencyStatus.INCONSISTENT;
-// return false;
-// }
-// else if(tag != 1) {
-// lazyUpperStore.dispose();
-// lazyUpperStore = null;
-// }
-// }
-// if(consistency.checkUpper(lazyUpperStore)) {
-// isConsistent = ConsistencyStatus.CONSISTENT;
-// Utility.logDebug("time for satisfiability checking: " + t.duration());
-// }
-
-// trackingStore.importRDFData(name, datafile);
-// trackingStore.materialise("saturate named individuals", originalMarkProgram);
-
-//// materialiseFullUpper();
-//// GapByStore4ID gap = new GapByStore4ID(trackingStore);
-// GapByStore4ID gap = new GapByStore4ID2(trackingStore, rlLowerStore);
-// trackingStore.materialiseFoldedly(program, gap);
-// predicatesWithGap = gap.getPredicatesWithGap();
-// gap.clear();
-
-// if(program.getGeneral().isHorn())
-// encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore);
-// else
-// encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore);
-//// encoder = new TrackingRuleEncoderDisj1(program.getUpper(), trackingStore);
-//// encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore);
-//// encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore);
-
-// // TODO? add consistency check by Skolem-upper-bound
-
-// if(!isConsistent())
-// return false;
-
-// consistency.extractBottomFragment();
-
-// return true;
-// }
-
-// @Override
-// public void evaluate(QueryRecord queryRecord) {
-// if(isDisposed()) throw new DisposedException();
-
-// if(queryLowerAndUpperBounds(queryRecord))
-// return;
+    if(queryLowerAndUpperBounds(query))
+      return;
 
 // OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord);
 
@@ -237,6 +225,13 @@ class AcquaQueryReasoner(var ontology: Ontology)
 // if(properties.getToCallHermiT())
 // queryRecord.markAsProcessed();
 // summarisedChecker.dispose();
+    ???
+  }
+
+  def evaluateUpper(record: QueryRecord): Unit= ???
+
+// @Override
+// public void evaluate(QueryRecord queryRecord) {
 // }
 
 // @Override
@@ -306,32 +301,31 @@ class AcquaQueryReasoner(var ontology: Ontology)
 // return false;
 // }
 
-// /**
-//  * Returns the part of the ontology relevant for Hermit, while computing the bound answers.
-//  */
-// private boolean queryLowerAndUpperBounds(QueryRecord queryRecord) {
-
-// Utility.logInfo(">> Base bounds <<");
-
-// AnswerTuples rlAnswer = null, elAnswer = null;
+  /**
+   * Returns the part of the ontology relevant for Hermit, while computing the bound answers.
+   */
+  private def queryLowerAndUpperBounds(query: QueryRecord): Boolean = {
+    Utility logInfo ">> Base bounds <<"
+    val extendedQueryTexts: Tuple[String] = query.getExtendedQueryText()
+    var rlAnswer: AnswerTuples = null
+    var elAnswer: AnswerTuples = null
 
-// t.reset();
-// try {
-// rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
-// Utility.logDebug(t.duration());
-// queryRecord.updateLowerBoundAnswers(rlAnswer);
-// } finally {
-// if(rlAnswer != null) rlAnswer.dispose();
-// }
-// queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration());
-
-// Tuple<String> extendedQueryTexts = queryRecord.getExtendedQueryText();
+    /* Computing RL lower bound answers */
+    timer.reset();
+    try {
+      rlAnswer = rlLowerStore.evaluate(query.getQueryText, query.getAnswerVariables)
+      Utility logDebug timer.duration()
+      query updateLowerBoundAnswers rlAnswer
+    } finally {
+      if (rlAnswer != null) rlAnswer.dispose()
+    }
+    query.addProcessingTime(Step.LOWER_BOUND, timer.duration());
 
-// if(properties.getUseAlwaysSimpleUpperBound() || lazyUpperStore == null) {
-// Utility.logDebug("Tracking store");
-// if(queryUpperStore(trackingStore, queryRecord, extendedQueryTexts, Step.SIMPLE_UPPER_BOUND))
-// return true;
-// }
+    if(properties.getUseAlwaysSimpleUpperBound() || lazyUpperStore.isEmpty) {
+      Utility logDebug "Tracking store"
+      // if (queryUpperStore(trackingStore, query, extendedQueryTexts, Step.SIMPLE_UPPER_BOUND))
+      //   return true;
+    }
 
 // if(!queryRecord.isBottom()) {
 // Utility.logDebug("Lazy store");
@@ -356,8 +350,8 @@ class AcquaQueryReasoner(var ontology: Ontology)
 // return true;
 // }
 
-// return false;
-// }
+    return false;
+  }
 
 // private OWLOntology extractRelevantOntologySubset(QueryRecord queryRecord) {
 // Utility.logInfo(">> Relevant ontology-subset extraction <<");
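
Not part of the commit above, but for orientation: a minimal sketch of how the entry points touched in this diff might be driven. Only the AcquaQueryReasoner constructor and the preprocess()/evaluate() signatures are taken from the diff; the answerAll helper and the way the ontology and query records are obtained are hypothetical.

    import uk.ac.ox.cs.acqua.reasoner.AcquaQueryReasoner
    import uk.ac.ox.cs.pagoda.query.QueryRecord
    import uk.ac.ox.cs.rsacomb.ontology.Ontology

    // Hypothetical driver: `ontology` and `queries` are assumed to be built
    // elsewhere (e.g. by the surrounding RSAComb/PAGOdA loading code).
    def answerAll(ontology: Ontology, queries: Seq[QueryRecord]): Unit = {
      val reasoner = new AcquaQueryReasoner(ontology)
      // preprocess() materialises the RL/ELHO lower bounds and the tracking
      // store, and returns false when the ontology is found inconsistent.
      if (reasoner.preprocess()) {
        // evaluate() first tries the cheap lower/upper bounds; queries whose
        // bounds do not coincide would continue to the (still unimplemented)
        // full check.
        queries.foreach(reasoner.evaluate)
      }
    }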