Diffstat (limited to 'src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java')
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java  472
1 file changed, 0 insertions, 472 deletions
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
deleted file mode 100644
index 8cc9209..0000000
--- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
+++ /dev/null
@@ -1,472 +0,0 @@
package uk.ac.ox.cs.pagoda.reasoner;

import org.semanticweb.karma2.profile.ELHOProfile;
import org.semanticweb.owlapi.model.OWLOntology;
import uk.ac.ox.cs.JRDFox.JRDFStoreException;
import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator;
import uk.ac.ox.cs.pagoda.owl.OWLHelper;
import uk.ac.ox.cs.pagoda.query.AnswerTuples;
import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
import uk.ac.ox.cs.pagoda.query.GapByStore4ID2;
import uk.ac.ox.cs.pagoda.query.QueryRecord;
import uk.ac.ox.cs.pagoda.query.QueryRecord.Step;
import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine;
import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter;
import uk.ac.ox.cs.pagoda.tracking.QueryTracker;
import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder;
import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderDisjVar1;
import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderWithGap;
import uk.ac.ox.cs.pagoda.util.ExponentialInterpolation;
import uk.ac.ox.cs.pagoda.util.PagodaProperties;
import uk.ac.ox.cs.pagoda.util.Timer;
import uk.ac.ox.cs.pagoda.util.Utility;
import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
import uk.ac.ox.cs.pagoda.util.tuples.Tuple;

import java.util.Collection;
import java.util.LinkedList;

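/*
 * Editor's note, inferred from the code below: this reasoner combines an RL lower-bound store,
 * an ELHO (Karma) lower-bound store, an optional lazy multi-stage upper-bound store and a
 * tracking store. Queries not settled by these bounds are checked by HermiT on a summarised,
 * and if necessary the full, relevant ontology subset.
 *
 * Typical usage (editor's sketch, assuming the surrounding QueryReasoner API):
 *
 *   MyQueryReasoner reasoner = new MyQueryReasoner();
 *   reasoner.loadOntology(ontology);
 *   if(reasoner.preprocess())
 *       for(QueryRecord record : queryRecords)
 *           reasoner.evaluate(record);
 *   reasoner.dispose();
 */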
class MyQueryReasoner extends QueryReasoner {

    OWLOntology ontology;
    OWLOntology elho_ontology;
    DatalogProgram program;

    BasicQueryEngine rlLowerStore = null;
    KarmaQueryEngine elLowerStore = null;
    MultiStageQueryEngine lazyUpperStore = null;
    MultiStageQueryEngine trackingStore = null;
    TrackingRuleEncoder encoder;

    private boolean equalityTag;
    private Timer t = new Timer();

    private Collection<String> predicatesWithGap = null;
    private ConsistencyStatus isConsistent;
    private ConsistencyManager consistency = new ConsistencyManager(this);
    // private int relevantOntologiesCounter = 0;

    public MyQueryReasoner() {
        setup(true);
    }

    public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) {
        if(!multiStageTag)
            throw new IllegalArgumentException(
                    "Value \"false\" for parameter \"multiStageTag\" is no longer supported");

        setup(considerEqualities);
    }

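    /**
     * Loads the ontology: optionally eliminates equalities, builds the datalog program,
     * creates the lazy upper-bound store only for non-Horn programs, imports the additional
     * data generated for the program, and extracts the ELHO fragment for the Karma lower bound.
     */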
    @Override
    public void loadOntology(OWLOntology o) {
        if(isDisposed()) throw new DisposedException();
        if(!equalityTag) {
            EqualitiesEliminator eliminator = new EqualitiesEliminator(o);
            o = eliminator.getOutputOntology();
            eliminator.save();
        }

        ontology = o;
        program = new DatalogProgram(ontology);
        // program.getLower().save();
        // program.getUpper().save();
        program.getGeneral().save();

        if(!program.getGeneral().isHorn())
            lazyUpperStore = new MultiStageQueryEngine("lazy-upper-bound", true);

        importData(program.getAdditionalDataFile());

        elho_ontology = new ELHOProfile().getFragment(ontology);
        elLowerStore.processOntology(elho_ontology);
    }

    public Collection<String> getPredicatesWithGap() {
        if(isDisposed()) throw new DisposedException();
        return predicatesWithGap;
    }

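    /**
     * Materialises the lower-bound and upper-bound stores and checks satisfiability along the way.
     *
     * @return false if the ontology has already been found inconsistent, true otherwise
     */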
    @Override
    public boolean preprocess() {
        if(isDisposed()) throw new DisposedException();

        t.reset();
        Utility.logInfo("Preprocessing (and checking satisfiability)...");

        String name = "data", datafile = getImportedData();
        rlLowerStore.importRDFData(name, datafile);
        rlLowerStore.materialise("lower program", program.getLower().toString());
        // program.getLower().save();
        if(!consistency.checkRLLowerBound()) {
            Utility.logDebug("time for satisfiability checking: " + t.duration());
            isConsistent = ConsistencyStatus.INCONSISTENT;
            return false;
        }
        Utility.logDebug("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber());

        String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology);

        elLowerStore.importRDFData(name, datafile);
        elLowerStore.materialise("saturate named individuals", originalMarkProgram);
        elLowerStore.materialise("lower program", program.getLower().toString());
        elLowerStore.initialiseKarma();
        if(!consistency.checkELLowerBound()) {
            Utility.logDebug("time for satisfiability checking: " + t.duration());
            isConsistent = ConsistencyStatus.INCONSISTENT;
            return false;
        }

        if(lazyUpperStore != null) {
            lazyUpperStore.importRDFData(name, datafile);
            lazyUpperStore.materialise("saturate named individuals", originalMarkProgram);
            int tag = lazyUpperStore.materialiseRestrictedly(program, null);
            if(tag == -1) {
                Utility.logDebug("time for satisfiability checking: " + t.duration());
                isConsistent = ConsistencyStatus.INCONSISTENT;
                return false;
            }
            else if(tag != 1) {
                lazyUpperStore.dispose();
                lazyUpperStore = null;
            }
        }
        if(consistency.checkUpper(lazyUpperStore)) {
            isConsistent = ConsistencyStatus.CONSISTENT;
            Utility.logDebug("time for satisfiability checking: " + t.duration());
        }

        trackingStore.importRDFData(name, datafile);
        trackingStore.materialise("saturate named individuals", originalMarkProgram);

        // materialiseFullUpper();
        // GapByStore4ID gap = new GapByStore4ID(trackingStore);
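        // Editor's note: materialise the tracking store while recording, per predicate, the "gap"
        // between upper-bound and RL lower-bound derivations; only predicates with a non-empty gap
        // can contribute answers that still need to be verified later.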
        GapByStore4ID gap = new GapByStore4ID2(trackingStore, rlLowerStore);
        trackingStore.materialiseFoldedly(program, gap);
        predicatesWithGap = gap.getPredicatesWithGap();
        gap.clear();

        if(program.getGeneral().isHorn())
            encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore);
        else
            encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore);
        // encoder = new TrackingRuleEncoderDisj1(program.getUpper(), trackingStore);
        // encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore);
        // encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore);

        // TODO? add consistency check by Skolem-upper-bound

        if(!isConsistent())
            return false;

        consistency.extractBottomFragment();

        return true;
    }

    @Override
    public boolean isConsistent() {
        if(isDisposed()) throw new DisposedException();

        if(isConsistent == ConsistencyStatus.UNCHECKED) {
            isConsistent = consistency.check() ? ConsistencyStatus.CONSISTENT : ConsistencyStatus.INCONSISTENT;
            Utility.logDebug("time for satisfiability checking: " + t.duration());
        }
        if(isConsistent == ConsistencyStatus.CONSISTENT) {
            Utility.logInfo("The ontology is consistent!");
            return true;
        }
        else {
            Utility.logInfo("The ontology is inconsistent!");
            return false;
        }
    }

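    /**
     * Evaluates a query: first the cheap lower and upper bounds, then, for the remaining gap
     * answers, extraction of a relevant ontology subset, an optional (semi-)Skolemised upper
     * bound, summarisation, and finally full HermiT reasoning.
     */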
    @Override
    public void evaluate(QueryRecord queryRecord) {
        if(isDisposed()) throw new DisposedException();

        if(queryLowerAndUpperBounds(queryRecord))
            return;

        OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord);

        // queryRecord.saveRelevantOntology("/home/alessandro/Desktop/test-relevant-ontology-"+relevantOntologiesCounter+".owl");
        // relevantOntologiesCounter++;

        if(properties.getSkolemUpperBound() == PagodaProperties.SkolemUpperBoundOptions.BEFORE_SUMMARISATION
                && querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) {
            return;
        }

        Utility.logInfo(">> Summarisation <<");
        HermitSummaryFilter summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT());
        if(summarisedChecker.check(queryRecord.getGapAnswers()) == 0) {
            summarisedChecker.dispose();
            return;
        }

        if(properties.getSkolemUpperBound() == PagodaProperties.SkolemUpperBoundOptions.AFTER_SUMMARISATION
                && querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) {
            summarisedChecker.dispose();
            return;
        }

        Utility.logInfo(">> Full reasoning <<");
        Timer t = new Timer();
        summarisedChecker.checkByFullReasoner(queryRecord.getGapAnswers());
        Utility.logDebug("Total time for full reasoner: " + t.duration());

        if(properties.getToCallHermiT())
            queryRecord.markAsProcessed();
        summarisedChecker.dispose();
    }

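    /**
     * Computes only the upper-bound answers for the query, using the tracking store for bottom
     * queries or when no lazy upper store is available, and the lazy upper store otherwise.
     */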
    @Override
    public void evaluateUpper(QueryRecord queryRecord) {
        if(isDisposed()) throw new DisposedException();
        // TODO? add new upper store
        AnswerTuples rlAnswer = null;
        boolean useFull = queryRecord.isBottom() || lazyUpperStore == null;
        try {
            rlAnswer =
                    (useFull ? trackingStore : lazyUpperStore).evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
            queryRecord.updateUpperBoundAnswers(rlAnswer, true);
        } finally {
            if(rlAnswer != null) rlAnswer.dispose();
        }
    }

    @Override
    public void dispose() {
        super.dispose();

        if(encoder != null) encoder.dispose();
        if(rlLowerStore != null) rlLowerStore.dispose();
        if(lazyUpperStore != null) lazyUpperStore.dispose();
        if(elLowerStore != null) elLowerStore.dispose();
        if(trackingStore != null) trackingStore.dispose();
        if(consistency != null) consistency.dispose();
        if(program != null) program.dispose();
    }

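    /**
     * Initialises the lower-bound stores and the tracking store; the lazy upper-bound store is
     * created later, in loadOntology, and only if the general program is not Horn.
     */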
    private void setup(boolean considerEqualities) {
        if(isDisposed()) throw new DisposedException();

        isConsistent = ConsistencyStatus.UNCHECKED;
        this.equalityTag = considerEqualities;

        rlLowerStore = new BasicQueryEngine("rl-lower-bound");
        elLowerStore = new KarmaQueryEngine("elho-lower-bound");

        trackingStore = new MultiStageQueryEngine("tracking", false);
    }

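    /** Imports an additional data file into every materialisation store managed by this reasoner. */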
    protected void internal_importDataFile(String name, String datafile) {
        // addDataFile(datafile);
        rlLowerStore.importRDFData(name, datafile);
        if(lazyUpperStore != null)
            lazyUpperStore.importRDFData(name, datafile);
        elLowerStore.importRDFData(name, datafile);
        trackingStore.importRDFData(name, datafile);
    }

    /**
     * Deals with blank nodes differently from variables,
     * according to the SPARQL semantics for the OWL 2 entailment regime.
     * <p>
     * In particular, variables are matched only against named individuals,
     * while blank nodes are matched against both named and anonymous individuals.
     */
    private boolean queryUpperStore(BasicQueryEngine upperStore, QueryRecord queryRecord,
                                    Tuple<String> extendedQuery, Step step) {
        t.reset();

        Utility.logDebug("First query type");
        queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables());
        if(!queryRecord.isProcessed() && !queryRecord.getQueryText().equals(extendedQuery.get(0))) {
            Utility.logDebug("Second query type");
            queryUpperBound(upperStore, queryRecord, extendedQuery.get(0), queryRecord.getAnswerVariables());
        }
        if(!queryRecord.isProcessed() && queryRecord.hasNonAnsDistinguishedVariables()) {
            Utility.logDebug("Third query type");
            queryUpperBound(upperStore, queryRecord, extendedQuery.get(1), queryRecord.getDistinguishedVariables());
        }

        queryRecord.addProcessingTime(step, t.duration());
        if(queryRecord.isProcessed()) {
            queryRecord.setDifficulty(step);
            return true;
        }
        return false;
    }

    /**
     * Computes the lower- and upper-bound answers for the given query.
     *
     * @return true if the two bounds coincide, i.e. the query is fully answered without
     * extracting a relevant ontology subset for HermiT
     */
    private boolean queryLowerAndUpperBounds(QueryRecord queryRecord) {

        Utility.logInfo(">> Base bounds <<");

        AnswerTuples rlAnswer = null, elAnswer = null;

        t.reset();
        try {
            rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
            Utility.logDebug(t.duration());
            queryRecord.updateLowerBoundAnswers(rlAnswer);
        } finally {
            if(rlAnswer != null) rlAnswer.dispose();
        }
        queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration());

        Tuple<String> extendedQueryTexts = queryRecord.getExtendedQueryText();

        if(properties.getUseAlwaysSimpleUpperBound() || lazyUpperStore == null) {
            Utility.logDebug("Tracking store");
            if(queryUpperStore(trackingStore, queryRecord, extendedQueryTexts, Step.SIMPLE_UPPER_BOUND))
                return true;
        }

        if(!queryRecord.isBottom()) {
            Utility.logDebug("Lazy store");
            if(lazyUpperStore != null && queryUpperStore(lazyUpperStore, queryRecord, extendedQueryTexts, Step.LAZY_UPPER_BOUND))
                return true;
        }

        t.reset();
        try {
            elAnswer = elLowerStore.evaluate(extendedQueryTexts.get(0),
                                             queryRecord.getAnswerVariables(),
                                             queryRecord.getLowerBoundAnswers());
            Utility.logDebug(t.duration());
            queryRecord.updateLowerBoundAnswers(elAnswer);
        } finally {
            if(elAnswer != null) elAnswer.dispose();
        }
        queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration());

        if(queryRecord.isProcessed()) {
            queryRecord.setDifficulty(Step.EL_LOWER_BOUND);
            return true;
        }

        return false;
    }

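    /**
     * Uses the tracking rule encoder over the tracking store to extract the subset of the
     * ontology that is relevant to the query's remaining gap answers.
     */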
    private OWLOntology extractRelevantOntologySubset(QueryRecord queryRecord) {
        Utility.logInfo(">> Relevant ontology-subset extraction <<");

        t.reset();

        QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord);
        OWLOntology relevantOntologySubset = tracker.extract(trackingStore, consistency.getQueryRecords(), true);

        queryRecord.addProcessingTime(Step.FRAGMENT, t.duration());

        int numOfABoxAxioms = relevantOntologySubset.getABoxAxioms(true).size();
        int numOfTBoxAxioms = relevantOntologySubset.getAxiomCount() - numOfABoxAxioms;
        Utility.logInfo("Relevant ontology-subset has been extracted: |ABox|="
                + numOfABoxAxioms + ", |TBox|=" + numOfTBoxAxioms);

        return relevantOntologySubset;
    }

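    /** Evaluates one query variant against the given upper-bound store and updates the record's upper-bound answers. */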
    private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) {
        AnswerTuples rlAnswer = null;
        try {
            Utility.logDebug(queryText);
            rlAnswer = upperStore.evaluate(queryText, answerVariables);
            Utility.logDebug(t.duration());
            queryRecord.updateUpperBoundAnswers(rlAnswer);
        } finally {
            if(rlAnswer != null) rlAnswer.dispose();
        }
    }

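    /**
     * Builds a (semi-)Skolemised upper bound over the relevant ontology subset, iteratively
     * deepening the maximum Skolem term depth until the query is fully answered, the configured
     * depth or triple limit is reached, or the store size stops growing.
     */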
    private boolean querySkolemisedRelevantSubset(OWLOntology relevantSubset, QueryRecord queryRecord) {
        Utility.logInfo(">> Semi-Skolemisation <<");
        t.reset();

        DatalogProgram relevantProgram = new DatalogProgram(relevantSubset);

        MultiStageQueryEngine relevantStore =
                new MultiStageQueryEngine("Relevant-store", true); // checkValidity is true

        relevantStore.importDataFromABoxOf(relevantSubset);
        String relevantOriginalMarkProgram = OWLHelper.getOriginalMarkProgram(relevantSubset);

        relevantStore.materialise("Mark original individuals", relevantOriginalMarkProgram);

        boolean isFullyProcessed = false;
        LinkedList<Tuple<Long>> lastTwoTriplesCounts = new LinkedList<>();
        for (int currentMaxTermDepth = 1; !isFullyProcessed; currentMaxTermDepth++) {

            if(currentMaxTermDepth > properties.getSkolemDepth()) {
                Utility.logInfo("Maximum term depth reached");
                break;
            }

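            // Once two (depth, store size) points are available, stop if the store has stopped
            // growing, or extrapolate the size expected at this depth and stop if it would
            // exceed the configured triple limit.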
            if(lastTwoTriplesCounts.size() == 2) {
                if(lastTwoTriplesCounts.get(0).get(1).equals(lastTwoTriplesCounts.get(1).get(1)))
                    break;

                ExponentialInterpolation interpolation = new ExponentialInterpolation(lastTwoTriplesCounts.get(0).get(0),
                                                                                      lastTwoTriplesCounts.get(0).get(1),
                                                                                      lastTwoTriplesCounts.get(1).get(0),
                                                                                      lastTwoTriplesCounts.get(1).get(1));
                double triplesEstimate = interpolation.computeValue(currentMaxTermDepth);

                Utility.logDebug("Estimate of the number of triples: " + triplesEstimate);

                // exit condition if the query is not fully answered
                if(triplesEstimate > properties.getMaxTriplesInSkolemStore()) {
                    Utility.logInfo("Interrupting Semi-Skolemisation because of triples count limit");
                    break;
                }
            }

            Utility.logInfo("Trying with maximum depth " + currentMaxTermDepth);

            int materialisationTag = relevantStore.materialiseSkolemly(relevantProgram, null,
                                                                       currentMaxTermDepth);
            queryRecord.addProcessingTime(Step.SKOLEM_UPPER_BOUND, t.duration());
            if(materialisationTag == -1) {
                relevantStore.dispose();
                throw new Error("A consistent ontology has turned out to be " +
                        "inconsistent in the Skolemised relevant upper store");
            }
            else if(materialisationTag != 1) {
                Utility.logInfo("Semi-Skolemised relevant upper store cannot be employed");
                break;
            }

            Utility.logInfo("Querying semi-Skolemised upper store...");
            isFullyProcessed = queryUpperStore(relevantStore, queryRecord,
                                               queryRecord.getExtendedQueryText(),
                                               Step.SKOLEM_UPPER_BOUND);

            try {
                lastTwoTriplesCounts.add
                        (new Tuple<>((long) currentMaxTermDepth, relevantStore.getStoreSize()));
            } catch (JRDFStoreException e) {
                e.printStackTrace();
                break;
            }
            if(lastTwoTriplesCounts.size() > 2)
                lastTwoTriplesCounts.remove();

            Utility.logDebug("Last two triples counts: " + lastTwoTriplesCounts);
        }

        relevantStore.dispose();
        Utility.logInfo("Semi-Skolemised relevant upper store has been evaluated");
        return isFullyProcessed;
    }

    private enum ConsistencyStatus {CONSISTENT, INCONSISTENT, UNCHECKED}

}
