| author | RncLsn <rnc.lsn@gmail.com> | 2015-05-15 17:32:22 +0100 |
|---|---|---|
| committer | RncLsn <rnc.lsn@gmail.com> | 2015-05-15 17:32:22 +0100 |
| commit | 1b6a128137e5d7a6ff75566869232fc054afabef (patch) | |
| tree | 3def49c3c9c1e2ebebc49b82d9eb562b6d097cad /src/uk/ac/ox/cs | |
| parent | bd995407098d1b0c79c17a28b0b23a2c24a493c6 (diff) | |
| download | ACQuA-1b6a128137e5d7a6ff75566869232fc054afabef.tar.gz ACQuA-1b6a128137e5d7a6ff75566869232fc054afabef.zip | |
Testing and fixing. Executed successfully on UOBM{1,2,3,4,5,6,7,8}.
Diffstat (limited to 'src/uk/ac/ox/cs')
12 files changed, 183 insertions, 220 deletions
diff --git a/src/uk/ac/ox/cs/pagoda/approx/RLPlusOntology.java b/src/uk/ac/ox/cs/pagoda/approx/RLPlusOntology.java index a60b664..1e17dac 100644 --- a/src/uk/ac/ox/cs/pagoda/approx/RLPlusOntology.java +++ b/src/uk/ac/ox/cs/pagoda/approx/RLPlusOntology.java | |||
| @@ -1,64 +1,23 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.approx; | 1 | package uk.ac.ox.cs.pagoda.approx; |
| 2 | 2 | ||
| 3 | import java.io.BufferedOutputStream; | ||
| 4 | import java.io.FileOutputStream; | ||
| 5 | import java.io.IOException; | ||
| 6 | import java.io.ObjectOutput; | ||
| 7 | import java.io.ObjectOutputStream; | ||
| 8 | import java.util.Collections; | ||
| 9 | import java.util.HashMap; | ||
| 10 | import java.util.HashSet; | ||
| 11 | import java.util.Iterator; | ||
| 12 | import java.util.LinkedList; | ||
| 13 | import java.util.Map; | ||
| 14 | import java.util.Random; | ||
| 15 | import java.util.Set; | ||
| 16 | |||
| 17 | import org.semanticweb.HermiT.Configuration; | 3 | import org.semanticweb.HermiT.Configuration; |
| 18 | import org.semanticweb.HermiT.model.DLClause; | 4 | import org.semanticweb.HermiT.model.DLClause; |
| 19 | import org.semanticweb.HermiT.model.DLOntology; | 5 | import org.semanticweb.HermiT.model.DLOntology; |
| 20 | import org.semanticweb.HermiT.structural.OWLClausification; | 6 | import org.semanticweb.HermiT.structural.OWLClausification; |
| 21 | import org.semanticweb.owlapi.model.IRI; | 7 | import org.semanticweb.owlapi.model.*; |
| 22 | import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; | ||
| 23 | import org.semanticweb.owlapi.model.OWLAxiom; | ||
| 24 | import org.semanticweb.owlapi.model.OWLClass; | ||
| 25 | import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; | ||
| 26 | import org.semanticweb.owlapi.model.OWLClassExpression; | ||
| 27 | import org.semanticweb.owlapi.model.OWLDataFactory; | ||
| 28 | import org.semanticweb.owlapi.model.OWLDataHasValue; | ||
| 29 | import org.semanticweb.owlapi.model.OWLDataMaxCardinality; | ||
| 30 | import org.semanticweb.owlapi.model.OWLDataMinCardinality; | ||
| 31 | import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; | ||
| 32 | import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom; | ||
| 33 | import org.semanticweb.owlapi.model.OWLDatatype; | ||
| 34 | import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom; | ||
| 35 | import org.semanticweb.owlapi.model.OWLIndividual; | ||
| 36 | import org.semanticweb.owlapi.model.OWLNamedIndividual; | ||
| 37 | import org.semanticweb.owlapi.model.OWLObjectAllValuesFrom; | ||
| 38 | import org.semanticweb.owlapi.model.OWLObjectComplementOf; | ||
| 39 | import org.semanticweb.owlapi.model.OWLObjectHasValue; | ||
| 40 | import org.semanticweb.owlapi.model.OWLObjectMaxCardinality; | ||
| 41 | import org.semanticweb.owlapi.model.OWLObjectMinCardinality; | ||
| 42 | import org.semanticweb.owlapi.model.OWLObjectOneOf; | ||
| 43 | import org.semanticweb.owlapi.model.OWLObjectProperty; | ||
| 44 | import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; | ||
| 45 | import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; | ||
| 46 | import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; | ||
| 47 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 48 | import org.semanticweb.owlapi.model.OWLOntologyCreationException; | ||
| 49 | import org.semanticweb.owlapi.model.OWLOntologyManager; | ||
| 50 | import org.semanticweb.owlapi.model.OWLOntologyStorageException; | ||
| 51 | import org.semanticweb.owlapi.model.OWLSameIndividualAxiom; | ||
| 52 | import org.semanticweb.owlapi.profiles.OWL2RLProfile; | 8 | import org.semanticweb.owlapi.profiles.OWL2RLProfile; |
| 53 | import org.semanticweb.owlapi.profiles.OWLProfileReport; | 9 | import org.semanticweb.owlapi.profiles.OWLProfileReport; |
| 54 | import org.semanticweb.owlapi.profiles.OWLProfileViolation; | 10 | import org.semanticweb.owlapi.profiles.OWLProfileViolation; |
| 55 | |||
| 56 | import uk.ac.ox.cs.pagoda.constraints.NullaryBottom; | 11 | import uk.ac.ox.cs.pagoda.constraints.NullaryBottom; |
| 57 | import uk.ac.ox.cs.pagoda.constraints.UnaryBottom; | 12 | import uk.ac.ox.cs.pagoda.constraints.UnaryBottom; |
| 58 | import uk.ac.ox.cs.pagoda.owl.OWLHelper; | 13 | import uk.ac.ox.cs.pagoda.owl.OWLHelper; |
| 59 | import uk.ac.ox.cs.pagoda.util.Namespace; | 14 | import uk.ac.ox.cs.pagoda.util.Namespace; |
| 60 | import uk.ac.ox.cs.pagoda.util.Utility; | 15 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 61 | 16 | ||
| 17 | import java.io.*; | ||
| 18 | import java.nio.file.Paths; | ||
| 19 | import java.util.*; | ||
| 20 | |||
| 62 | public class RLPlusOntology implements KnowledgeBase { | 21 | public class RLPlusOntology implements KnowledgeBase { |
| 63 | 22 | ||
| 64 | OWLOntologyManager manager; | 23 | OWLOntologyManager manager; |
| @@ -111,7 +70,7 @@ public class RLPlusOntology implements KnowledgeBase { | |||
| 111 | if (!tOntoIRI.endsWith(originalExtension)) tOntoIRI += originalExtension; | 70 | if (!tOntoIRI.endsWith(originalExtension)) tOntoIRI += originalExtension; |
| 112 | 71 | ||
| 113 | String rlOntologyIRI = originalExtension.isEmpty() ? tOntoIRI + "-RL.owl" : tOntoIRI.replaceFirst(originalExtension, "-RL.owl"); | 72 | String rlOntologyIRI = originalExtension.isEmpty() ? tOntoIRI + "-RL.owl" : tOntoIRI.replaceFirst(originalExtension, "-RL.owl"); |
| 114 | String rlDocumentIRI = (outputPath = Utility.TempDirectory + "RL.owl"); | 73 | String rlDocumentIRI = (outputPath = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "RL.owl").toString()); |
| 115 | outputOntology = manager.createOntology(IRI.create(rlOntologyIRI)); | 74 | outputOntology = manager.createOntology(IRI.create(rlOntologyIRI)); |
| 116 | manager.setOntologyDocumentIRI(outputOntology, IRI.create(Utility.toFileIRI(rlDocumentIRI))); | 75 | manager.setOntologyDocumentIRI(outputOntology, IRI.create(Utility.toFileIRI(rlDocumentIRI))); |
| 117 | 76 | ||
| @@ -119,8 +78,8 @@ public class RLPlusOntology implements KnowledgeBase { | |||
| 119 | tBoxOntologyIRI = originalExtension.isEmpty() ? tOntoIRI + "-TBox.owl" : tOntoIRI.replaceFirst(originalExtension, "-TBox.owl"); | 78 | tBoxOntologyIRI = originalExtension.isEmpty() ? tOntoIRI + "-TBox.owl" : tOntoIRI.replaceFirst(originalExtension, "-TBox.owl"); |
| 120 | aBoxOntologyIRI = originalExtension.isEmpty() ? tOntoIRI + "-ABox.owl" : tOntoIRI.replaceFirst(originalExtension, "-ABox.owl"); | 79 | aBoxOntologyIRI = originalExtension.isEmpty() ? tOntoIRI + "-ABox.owl" : tOntoIRI.replaceFirst(originalExtension, "-ABox.owl"); |
| 121 | 80 | ||
| 122 | String tBoxDocumentIRI = (Utility.TempDirectory + "TBox.owl"); | 81 | String tBoxDocumentIRI = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "TBox.owl").toString(); |
| 123 | String aBoxDocumentIRI = (aBoxPath = Utility.TempDirectory + "ABox.owl"); | 82 | String aBoxDocumentIRI = (aBoxPath = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "ABox.owl").toString()); |
| 124 | tBox = manager.createOntology(IRI.create(tBoxOntologyIRI)); | 83 | tBox = manager.createOntology(IRI.create(tBoxOntologyIRI)); |
| 125 | aBox = manager.createOntology(IRI.create(aBoxOntologyIRI)); | 84 | aBox = manager.createOntology(IRI.create(aBoxOntologyIRI)); |
| 126 | manager.setOntologyDocumentIRI(tBox, IRI.create(Utility.toFileIRI(tBoxDocumentIRI))); | 85 | manager.setOntologyDocumentIRI(tBox, IRI.create(Utility.toFileIRI(tBoxDocumentIRI))); |
| @@ -488,7 +447,7 @@ public class RLPlusOntology implements KnowledgeBase { | |||
| 488 | addedAxioms.add(factory.getOWLObjectPropertyRangeAxiom(r, tExp)); | 447 | addedAxioms.add(factory.getOWLObjectPropertyRangeAxiom(r, tExp)); |
| 489 | } | 448 | } |
| 490 | else if (botStrategy != BottomStrategy.TOREMOVE) { | 449 | else if (botStrategy != BottomStrategy.TOREMOVE) { |
| 491 | OWLClass cls = (OWLClass) ((OWLObjectComplementOf) tExp).getComplementNNF(); | 450 | OWLClass cls = (OWLClass) tExp.getComplementNNF(); |
| 492 | OWLClass neg; | 451 | OWLClass neg; |
| 493 | if ((neg = atomic2negation.get(cls)) == null) { | 452 | if ((neg = atomic2negation.get(cls)) == null) { |
| 494 | neg = getNewConcept(outputOntology, rlCounter); | 453 | neg = getNewConcept(outputOntology, rlCounter); |
| @@ -632,6 +591,6 @@ public class RLPlusOntology implements KnowledgeBase { | |||
| 632 | corrFileName = path; | 591 | corrFileName = path; |
| 633 | } | 592 | } |
| 634 | 593 | ||
| 635 | private static enum BottomStrategy { TOREMOVE, NULLARY, UNARY } | 594 | private enum BottomStrategy { TOREMOVE, NULLARY, UNARY } |
| 636 | } | 595 | } |
| 637 | 596 | ||
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java index 4ba2715..50996d0 100644 --- a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java | |||
| @@ -133,8 +133,8 @@ public class MultiStageQueryEngine extends StageQueryEngine { | |||
| 133 | subTimer.reset(); | 133 | subTimer.reset(); |
| 134 | if ((violations = program.isIntegrated(this, incrementally)) == null || violations.size() == 0) { | 134 | if ((violations = program.isIntegrated(this, incrementally)) == null || violations.size() == 0) { |
| 135 | store.clearRulesAndMakeFactsExplicit(); | 135 | store.clearRulesAndMakeFactsExplicit(); |
| 136 | Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - tripleCountBeforeMat) + " new)"); | 136 | Utility.logInfo(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - tripleCountBeforeMat) + " new)"); |
| 137 | Utility.logInfo(name + " store is DONE for multi-stage materialising in " + t.duration() + " seconds."); | 137 | Utility.logInfo(name + " store is DONE for multi-stage materialising in " + t.duration() + " seconds."); |
| 138 | return isValid() ? 1 : 0; | 138 | return isValid() ? 1 : 0; |
| 139 | } | 139 | } |
| 140 | Utility.logDebug("Time to detect violations: " + subTimer.duration()); | 140 | Utility.logDebug("Time to detect violations: " + subTimer.duration()); |
diff --git a/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java b/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java index 6c87eb5..15b2c01 100644 --- a/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java +++ b/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java | |||
| @@ -69,7 +69,7 @@ public class QueryRecord { | |||
| 69 | } | 69 | } |
| 70 | 70 | ||
| 71 | public boolean updateLowerBoundAnswers(AnswerTuples answerTuples) { | 71 | public boolean updateLowerBoundAnswers(AnswerTuples answerTuples) { |
| 72 | if (answerTuples == null) return false; | 72 | if (answerTuples == null) return false; |
| 73 | boolean update = false; | 73 | boolean update = false; |
| 74 | for (AnswerTuple tuple; answerTuples.isValid(); answerTuples.moveNext()) { | 74 | for (AnswerTuple tuple; answerTuples.isValid(); answerTuples.moveNext()) { |
| 75 | tuple = answerTuples.getTuple(); | 75 | tuple = answerTuples.getTuple(); |
| @@ -77,8 +77,12 @@ public class QueryRecord { | |||
| 77 | soundAnswerTuples.add(tuple); | 77 | soundAnswerTuples.add(tuple); |
| 78 | if (gapAnswerTuples != null) | 78 | if (gapAnswerTuples != null) |
| 79 | gapAnswerTuples.remove(tuple); | 79 | gapAnswerTuples.remove(tuple); |
| 80 | update = true; | 80 | update = true; |
| 81 | } | 81 | } |
| 82 | // TODO could be wrong, but if possible add the check | ||
| 83 | // else if (! gapAnswerTuples.contains(tuple)) { | ||
| 84 | // throw new IllegalArgumentException("The lower bound answers must be contained in the upper ones!"); | ||
| 85 | // } | ||
| 82 | } | 86 | } |
| 83 | Utility.logInfo("The number of answers in the lower bound: " + soundAnswerTuples.size()); | 87 | Utility.logInfo("The number of answers in the lower bound: " + soundAnswerTuples.size()); |
| 84 | 88 | ||
| @@ -103,14 +107,16 @@ public class QueryRecord { | |||
| 103 | for (; answerTuples.isValid(); answerTuples.moveNext()) { | 107 | for (; answerTuples.isValid(); answerTuples.moveNext()) { |
| 104 | ++number; | 108 | ++number; |
| 105 | } | 109 | } |
| 106 | Utility.logInfo("The number of answers returned by the upper bound: " + number); | 110 | Utility.logInfo("The number of answers returned by an upper bound: " + number); |
| 107 | if (number <= soundAnswerTuples.size()) { | 111 | if (number <= soundAnswerTuples.size()) { |
| 108 | if (gapAnswerTuples != null) gapAnswerTuples.clear(); | 112 | if (gapAnswerTuples != null) gapAnswerTuples.clear(); |
| 109 | else gapAnswerTuples = new HashSet<AnswerTuple>(); | 113 | else gapAnswerTuples = new HashSet<AnswerTuple>(); |
| 110 | 114 | ||
| 111 | Utility.logInfo("The number of answers in the upper bound: " + (soundAnswerTuples.size() + gapAnswerTuples.size())); | 115 | Utility.logInfo("The number of upper bound answers: " + (soundAnswerTuples.size() + gapAnswerTuples.size())); |
| 112 | return false; | 116 | return false; |
| 113 | } | 117 | } |
| 118 | else if (number < soundAnswerTuples.size()) | ||
| 119 | throw new IllegalArgumentException("The upper bound answers must contain all the lower bound ones!"); | ||
| 114 | answerTuples.reset(); | 120 | answerTuples.reset(); |
| 115 | } | 121 | } |
| 116 | 122 | ||
| @@ -362,8 +368,8 @@ public class QueryRecord { | |||
| 362 | } | 368 | } |
| 363 | 369 | ||
| 364 | public enum Step {LowerBound, UpperBound, ELLowerBound, | 370 | public enum Step {LowerBound, UpperBound, ELLowerBound, |
| 365 | Fragment, FragmentRefinement, Summarisation, Dependency, FullReasoning}; | 371 | Fragment, FragmentRefinement, Summarisation, Dependency, FullReasoning} |
| 366 | 372 | ||
| 367 | double[] timer; | 373 | double[] timer; |
| 368 | 374 | ||
| 369 | public void addProcessingTime(Step step, double time) { | 375 | public void addProcessingTime(Step step, double time) { |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java index b7a3667..409a2c9 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java | |||
| @@ -15,6 +15,7 @@ import uk.ac.ox.cs.pagoda.query.AnswerTuples; | |||
| 15 | import uk.ac.ox.cs.pagoda.query.QueryManager; | 15 | import uk.ac.ox.cs.pagoda.query.QueryManager; |
| 16 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | 16 | import uk.ac.ox.cs.pagoda.query.QueryRecord; |
| 17 | import uk.ac.ox.cs.pagoda.reasoner.full.Checker; | 17 | import uk.ac.ox.cs.pagoda.reasoner.full.Checker; |
| 18 | import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; | ||
| 18 | import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; | 19 | import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; |
| 19 | import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter; | 20 | import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter; |
| 20 | import uk.ac.ox.cs.pagoda.tracking.QueryTracker; | 21 | import uk.ac.ox.cs.pagoda.tracking.QueryTracker; |
| @@ -54,7 +55,7 @@ public class ConsistencyManager { | |||
| 54 | 55 | ||
| 55 | if (fullQueryRecord.getNoOfSoundAnswers() > 0) { | 56 | if (fullQueryRecord.getNoOfSoundAnswers() > 0) { |
| 56 | Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple()); | 57 | Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple()); |
| 57 | return unsatisfiability(t.duration()); | 58 | return false; |
| 58 | } | 59 | } |
| 59 | return true; | 60 | return true; |
| 60 | } | 61 | } |
| @@ -63,39 +64,20 @@ public class ConsistencyManager { | |||
| 63 | fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables())); | 64 | fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables())); |
| 64 | if (fullQueryRecord.getNoOfSoundAnswers() > 0) { | 65 | if (fullQueryRecord.getNoOfSoundAnswers() > 0) { |
| 65 | Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple()); | 66 | Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple()); |
| 66 | return unsatisfiability(t.duration()); | 67 | return true; |
| 67 | } | 68 | } |
| 68 | return true; | 69 | return true; |
| 69 | } | 70 | } |
| 70 | |||
| 71 | boolean checkLazyUpper() { | ||
| 72 | if (m_reasoner.lazyUpperStore != null) { | ||
| 73 | AnswerTuples tuples = null; | ||
| 74 | try { | ||
| 75 | tuples = m_reasoner.lazyUpperStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()); | ||
| 76 | |||
| 77 | Utility.logDebug("CheckLazyUpperBound: answerVars=" + fullQueryRecord.getAnswerVariables()); | ||
| 78 | |||
| 79 | if (!tuples.isValid()) { | ||
| 80 | Utility.logInfo("There are no contradictions derived in the lazy upper bound materialisation."); | ||
| 81 | return satisfiability(t.duration()); | ||
| 82 | } | ||
| 83 | } | ||
| 84 | finally { | ||
| 85 | if (tuples != null) tuples.dispose(); | ||
| 86 | } | ||
| 87 | } | ||
| 88 | return false; | ||
| 89 | } | ||
| 90 | 71 | ||
| 91 | boolean checkSkolemUpper() { | 72 | boolean checkUpper(BasicQueryEngine upperStore) { |
| 92 | if (m_reasoner.limitedSkolemUpperStore != null) { | 73 | if (upperStore != null) { |
| 93 | AnswerTuples tuples = null; | 74 | AnswerTuples tuples = null; |
| 94 | try { | 75 | try { |
| 95 | tuples = m_reasoner.limitedSkolemUpperStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()); | 76 | tuples = upperStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()); |
| 96 | if (!tuples.isValid()) { | 77 | if (!tuples.isValid()) { |
| 97 | Utility.logInfo("There are no contradictions derived in the limited-skolem upper bound materialisation."); | 78 | Utility.logInfo("There are no contradictions derived in "+ upperStore.getName() +" materialisation."); |
| 98 | return satisfiability(t.duration()); | 79 | Utility.logDebug("The ontology and dataset is satisfiable."); |
| 80 | return true; | ||
| 99 | } | 81 | } |
| 100 | } | 82 | } |
| 101 | finally { | 83 | finally { |
| @@ -104,7 +86,11 @@ public class ConsistencyManager { | |||
| 104 | } | 86 | } |
| 105 | return false; | 87 | return false; |
| 106 | } | 88 | } |
| 107 | 89 | ||
| 90 | void dispose() { | ||
| 91 | fullQueryRecord.dispose(); | ||
| 92 | } | ||
| 93 | |||
| 108 | boolean check() { | 94 | boolean check() { |
| 109 | // if (!checkRLLowerBound()) return false; | 95 | // if (!checkRLLowerBound()) return false; |
| 110 | // if (!checkELLowerBound()) return false; | 96 | // if (!checkELLowerBound()) return false; |
| @@ -119,7 +105,7 @@ public class ConsistencyManager { | |||
| 119 | } | 105 | } |
| 120 | 106 | ||
| 121 | if (fullQueryRecord.getNoOfCompleteAnswers() == 0) | 107 | if (fullQueryRecord.getNoOfCompleteAnswers() == 0) |
| 122 | return satisfiability(t.duration()); | 108 | return true; |
| 123 | 109 | ||
| 124 | extractBottomFragment(); | 110 | extractBottomFragment(); |
| 125 | 111 | ||
| @@ -139,7 +125,7 @@ public class ConsistencyManager { | |||
| 139 | checker = new HermitSummaryFilter(r, true); // m_reasoner.factory.getSummarisedReasoner(r); | 125 | checker = new HermitSummaryFilter(r, true); // m_reasoner.factory.getSummarisedReasoner(r); |
| 140 | satisfiability = checker.isConsistent(); | 126 | satisfiability = checker.isConsistent(); |
| 141 | checker.dispose(); | 127 | checker.dispose(); |
| 142 | if (!satisfiability) return unsatisfiability(t.duration()); | 128 | if (!satisfiability) return false; |
| 143 | } | 129 | } |
| 144 | 130 | ||
| 145 | // Checker checker = m_reasoner.factory.getSummarisedReasoner(fullQueryRecord); | 131 | // Checker checker = m_reasoner.factory.getSummarisedReasoner(fullQueryRecord); |
| @@ -147,20 +133,20 @@ public class ConsistencyManager { | |||
| 147 | // checker.dispose(); | 133 | // checker.dispose(); |
| 148 | // if (!satisfiable) return unsatisfiability(t.duration()); | 134 | // if (!satisfiable) return unsatisfiability(t.duration()); |
| 149 | 135 | ||
| 150 | return satisfiability(t.duration()); | 136 | return true; |
| 151 | } | 137 | } |
| 152 | 138 | ||
| 153 | protected boolean unsatisfiability(double duration) { | 139 | // protected boolean unsatisfiability(double duration) { |
| 154 | fullQueryRecord.dispose(); | 140 | // fullQueryRecord.dispose(); |
| 155 | Utility.logDebug("The ontology and dataset is unsatisfiable."); | 141 | // Utility.logDebug("The ontology and dataset is unsatisfiable."); |
| 156 | return false; | 142 | // return false; |
| 157 | } | 143 | // } |
| 158 | 144 | ||
| 159 | protected boolean satisfiability(double duration) { | 145 | // protected boolean satisfiability(double duration) { |
| 160 | fullQueryRecord.dispose(); | 146 | // fullQueryRecord.dispose(); |
| 161 | Utility.logDebug("The ontology and dataset is satisfiable."); | 147 | // Utility.logDebug("The ontology and dataset is satisfiable."); |
| 162 | return true; | 148 | // return true; |
| 163 | } | 149 | // } |
| 164 | 150 | ||
| 165 | boolean fragmentExtracted = false; | 151 | boolean fragmentExtracted = false; |
| 166 | 152 | ||
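The hunk above folds the two near-duplicate methods `checkLazyUpper()` and `checkSkolemUpper()` into a single `checkUpper(BasicQueryEngine upperStore)` that receives the store as a parameter. The sketch below shows only the essential shape of that refactor — a null-safe evaluation whose answer iterator is always disposed in a `finally` block; the `Store` and `Answers` interfaces and the query string are simplified, hypothetical stand-ins, not the actual PAGOdA types.

```java
// Simplified sketch of the parameterised consistency check introduced above.
// Store and Answers are hypothetical stand-ins for BasicQueryEngine and AnswerTuples.
interface Answers {
    boolean isValid();
    void dispose();
}

interface Store {
    String getName();
    Answers evaluate(String query);
}

public class CheckUpperSketch {

    /** Returns true if the (non-null) store derives no answers to the given query. */
    static boolean checkUpper(Store upperStore, String query) {
        if (upperStore == null) return false;
        Answers tuples = null;
        try {
            tuples = upperStore.evaluate(query);
            if (!tuples.isValid()) {
                System.out.println("No contradictions derived in " + upperStore.getName()
                        + " materialisation.");
                return true;
            }
            return false;
        } finally {
            // The answer iterator is released whichever branch is taken.
            if (tuples != null) tuples.dispose();
        }
    }
}
```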
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java index 9c335f3..9191067 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java | |||
| @@ -2,7 +2,6 @@ package uk.ac.ox.cs.pagoda.reasoner; | |||
| 2 | 2 | ||
| 3 | import org.semanticweb.owlapi.model.OWLOntologyCreationException; | 3 | import org.semanticweb.owlapi.model.OWLOntologyCreationException; |
| 4 | import org.semanticweb.owlapi.model.OWLOntologyManager; | 4 | import org.semanticweb.owlapi.model.OWLOntologyManager; |
| 5 | |||
| 6 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | 5 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; |
| 7 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | 6 | import uk.ac.ox.cs.pagoda.query.QueryRecord; |
| 8 | import uk.ac.ox.cs.pagoda.reasoner.full.Checker; | 7 | import uk.ac.ox.cs.pagoda.reasoner.full.Checker; |
| @@ -31,8 +30,9 @@ public class ConsistencyManager2 extends ConsistencyManager { | |||
| 31 | @Override | 30 | @Override |
| 32 | boolean check() { | 31 | boolean check() { |
| 33 | // if (!checkRLLowerBound()) return false; | 32 | // if (!checkRLLowerBound()) return false; |
| 34 | // if (!checkELLowerBound()) return false; | 33 | // if (!checkELLowerBound()) return false; |
| 35 | if (checkLazyUpper()) return true; | 34 | // TODO test |
| 35 | if (checkUpper(m_reasoner.lazyUpperStore) && checkUpper(m_reasoner.limitedSkolemUpperStore)) return true; | ||
| 36 | AnswerTuples iter = null; | 36 | AnswerTuples iter = null; |
| 37 | 37 | ||
| 38 | try { | 38 | try { |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java index 36ea7de..1f435b7 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java | |||
| @@ -50,6 +50,7 @@ public class MyQueryReasoner extends QueryReasoner { | |||
| 50 | private Collection<String> predicatesWithGap = null; | 50 | private Collection<String> predicatesWithGap = null; |
| 51 | private Boolean satisfiable; | 51 | private Boolean satisfiable; |
| 52 | private ConsistencyManager consistency = new ConsistencyManager(this); | 52 | private ConsistencyManager consistency = new ConsistencyManager(this); |
| 53 | private boolean useUpperStores = false; | ||
| 53 | 54 | ||
| 54 | public MyQueryReasoner() { | 55 | public MyQueryReasoner() { |
| 55 | setup(true, true); | 56 | setup(true, true); |
| @@ -101,9 +102,9 @@ public class MyQueryReasoner extends QueryReasoner { | |||
| 101 | // program.getUpper().save(); | 102 | // program.getUpper().save(); |
| 102 | // program.getGeneral().save(); | 103 | // program.getGeneral().save(); |
| 103 | 104 | ||
| 104 | if (multiStageTag && !program.getGeneral().isHorn()) { | 105 | useUpperStores = multiStageTag && !program.getGeneral().isHorn(); |
| 106 | if (useUpperStores) { | ||
| 105 | lazyUpperStore = getUpperStore("lazy-upper-bound", true); // new MultiStageQueryEngine("lazy-upper-bound", true); // | 107 | lazyUpperStore = getUpperStore("lazy-upper-bound", true); // new MultiStageQueryEngine("lazy-upper-bound", true); // |
| 106 | // TODO CHECK | ||
| 107 | limitedSkolemUpperStore = getUpperStore("limited-skolem-upper-bound", true); | 108 | limitedSkolemUpperStore = getUpperStore("limited-skolem-upper-bound", true); |
| 108 | } | 109 | } |
| 109 | 110 | ||
| @@ -120,7 +121,7 @@ public class MyQueryReasoner extends QueryReasoner { | |||
| 120 | @Override | 121 | @Override |
| 121 | public boolean preprocess() { | 122 | public boolean preprocess() { |
| 122 | t.reset(); | 123 | t.reset(); |
| 123 | Utility.logInfo("Preprocessing ... checking satisfiability ... "); | 124 | Utility.logInfo("Preprocessing... checking satisfiability... "); |
| 124 | 125 | ||
| 125 | String name = "data", datafile = importedData.toString(); | 126 | String name = "data", datafile = importedData.toString(); |
| 126 | rlLowerStore.importRDFData(name, datafile); | 127 | rlLowerStore.importRDFData(name, datafile); |
| @@ -147,12 +148,11 @@ public class MyQueryReasoner extends QueryReasoner { | |||
| 147 | } | 148 | } |
| 148 | if (tag == -1) return false; | 149 | if (tag == -1) return false; |
| 149 | } | 150 | } |
| 150 | if (consistency.checkLazyUpper()) { | 151 | if (consistency.checkUpper(lazyUpperStore)) { |
| 151 | satisfiable = true; | 152 | satisfiable = true; |
| 152 | Utility.logInfo("time for satisfiability checking: " + t.duration()); | 153 | Utility.logInfo("time for satisfiability checking: " + t.duration()); |
| 153 | } | 154 | } |
| 154 | 155 | ||
| 155 | // TODO check | ||
| 156 | if (limitedSkolemUpperStore != null) { | 156 | if (limitedSkolemUpperStore != null) { |
| 157 | limitedSkolemUpperStore.importRDFData(name, datafile); | 157 | limitedSkolemUpperStore.importRDFData(name, datafile); |
| 158 | limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram); | 158 | limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram); |
| @@ -163,16 +163,14 @@ public class MyQueryReasoner extends QueryReasoner { | |||
| 163 | } | 163 | } |
| 164 | if (tag == -1) return false; | 164 | if (tag == -1) return false; |
| 165 | } | 165 | } |
| 166 | // FIXME nullPointerException | 166 | if (consistency.checkUpper(limitedSkolemUpperStore)) { |
| 167 | // if (consistency.checkSkolemUpper()) { | 167 | satisfiable = true; |
| 168 | // satisfiable = true; | 168 | Utility.logInfo("time for satisfiability checking: " + t.duration()); |
| 169 | // Utility.logInfo("time for satisfiability checking: " + t.duration()); | 169 | } |
| 170 | // } | ||
| 171 | 170 | ||
| 172 | trackingStore.importRDFData(name, datafile); | 171 | trackingStore.importRDFData(name, datafile); |
| 173 | trackingStore.materialise("saturate named individuals", originalMarkProgram); | 172 | trackingStore.materialise("saturate named individuals", originalMarkProgram); |
| 174 | 173 | ||
| 175 | // materialiseFullUpper(); | ||
| 176 | GapByStore4ID gap = new GapByStore4ID(trackingStore); | 174 | GapByStore4ID gap = new GapByStore4ID(trackingStore); |
| 177 | trackingStore.materialiseFoldedly(program, gap); | 175 | trackingStore.materialiseFoldedly(program, gap); |
| 178 | predicatesWithGap = gap.getPredicatesWithGap(); | 176 | predicatesWithGap = gap.getPredicatesWithGap(); |
| @@ -192,6 +190,7 @@ public class MyQueryReasoner extends QueryReasoner { | |||
| 192 | return false; | 190 | return false; |
| 193 | 191 | ||
| 194 | consistency.extractBottomFragment(); | 192 | consistency.extractBottomFragment(); |
| 193 | consistency.dispose(); | ||
| 195 | return true; | 194 | return true; |
| 196 | } | 195 | } |
| 197 | 196 | ||
| @@ -204,6 +203,9 @@ public class MyQueryReasoner extends QueryReasoner { | |||
| 204 | return satisfiable; | 203 | return satisfiable; |
| 205 | } | 204 | } |
| 206 | 205 | ||
| 206 | /** | ||
| 207 | * Returns the relevant part of the ontology, while computing the bound answers. | ||
| 208 | * */ | ||
| 207 | private OWLOntology relevantPart(QueryRecord queryRecord) { | 209 | private OWLOntology relevantPart(QueryRecord queryRecord) { |
| 208 | AnswerTuples rlAnswer = null, elAnswer = null; | 210 | AnswerTuples rlAnswer = null, elAnswer = null; |
| 209 | 211 | ||
| @@ -216,13 +218,13 @@ public class MyQueryReasoner extends QueryReasoner { | |||
| 216 | if (rlAnswer != null) rlAnswer.dispose(); | 218 | if (rlAnswer != null) rlAnswer.dispose(); |
| 217 | } | 219 | } |
| 218 | queryRecord.addProcessingTime(Step.LowerBound, t.duration()); | 220 | queryRecord.addProcessingTime(Step.LowerBound, t.duration()); |
| 219 | rlAnswer = null; | ||
| 220 | 221 | ||
| 221 | t.reset(); | 222 | t.reset(); |
| 222 | BasicQueryEngine upperStore = queryRecord.isBottom() || lazyUpperStore == null ? trackingStore : lazyUpperStore; | 223 | BasicQueryEngine upperStore = queryRecord.isBottom() || lazyUpperStore == null ? trackingStore : lazyUpperStore; |
| 223 | 224 | ||
| 224 | String[] extendedQuery = queryRecord.getExtendedQueryText(); | 225 | String[] extendedQuery = queryRecord.getExtendedQueryText(); |
| 225 | 226 | ||
| 227 | // TODO why the following??? | ||
| 226 | queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | 228 | queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); |
| 227 | 229 | ||
| 228 | if (!queryRecord.processed() && !queryRecord.getQueryText().equals(extendedQuery[0])) { | 230 | if (!queryRecord.processed() && !queryRecord.getQueryText().equals(extendedQuery[0])) { |
| @@ -232,25 +234,21 @@ public class MyQueryReasoner extends QueryReasoner { | |||
| 232 | queryUpperBound(upperStore, queryRecord, extendedQuery[1], queryRecord.getDistinguishedVariables()); | 234 | queryUpperBound(upperStore, queryRecord, extendedQuery[1], queryRecord.getDistinguishedVariables()); |
| 233 | } | 235 | } |
| 234 | 236 | ||
| 235 | Utility.logDebug(toJsonKeyValuePair("upperBound1", queryRecord)); | 237 | // Utility.logDebug(toJsonKeyValuePair("upperBound", queryRecord)); |
| 236 | 238 | ||
| 237 | // TODO check whether it is harmful. In case is not, implement it properly | 239 | // TODO test intersection and new upper bound |
| 238 | // BEGIN: trying to intersect | ||
| 239 | if (!queryRecord.isBottom() && lazyUpperStore != null) { | 240 | if (!queryRecord.isBottom() && lazyUpperStore != null) { |
| 240 | queryUpperBound(trackingStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | 241 | queryUpperBound(trackingStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); |
| 241 | } | 242 | } |
| 242 | if (!queryRecord.isBottom() && limitedSkolemUpperStore != null) { | 243 | if (!queryRecord.isBottom() && limitedSkolemUpperStore != null) { |
| 243 | queryUpperBound(limitedSkolemUpperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | 244 | queryUpperBound(limitedSkolemUpperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); |
| 244 | } | 245 | } |
| 245 | // END: trying to intersect | ||
| 246 | 246 | ||
| 247 | queryRecord.addProcessingTime(Step.UpperBound, t.duration()); | 247 | queryRecord.addProcessingTime(Step.UpperBound, t.duration()); |
| 248 | if (queryRecord.processed()) { | 248 | if (queryRecord.processed()) { |
| 249 | queryRecord.setDifficulty(Step.UpperBound); | 249 | queryRecord.setDifficulty(Step.UpperBound); |
| 250 | return null; | 250 | return null; |
| 251 | } | 251 | } |
| 252 | |||
| 253 | // TODO add evaluation on new upper store | ||
| 254 | 252 | ||
| 255 | t.reset(); | 253 | t.reset(); |
| 256 | try { | 254 | try { |
| @@ -327,15 +325,15 @@ public class MyQueryReasoner extends QueryReasoner { | |||
| 327 | 325 | ||
| 328 | @Override | 326 | @Override |
| 329 | public void evaluate(QueryRecord queryRecord) { | 327 | public void evaluate(QueryRecord queryRecord) { |
| 330 | OWLOntology knowledgebase = relevantPart(queryRecord); | 328 | OWLOntology knowledgeBase = relevantPart(queryRecord); |
| 331 | 329 | ||
| 332 | if (knowledgebase == null) { | 330 | if (knowledgeBase == null) { |
| 333 | Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty()); | 331 | Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty()); |
| 334 | return ; | 332 | return ; |
| 335 | } | 333 | } |
| 336 | 334 | ||
| 337 | int aboxcount = knowledgebase.getABoxAxioms(true).size(); | 335 | int aBoxCount = knowledgeBase.getABoxAxioms(true).size(); |
| 338 | Utility.logDebug("ABox axioms: " + aboxcount + " TBox axioms: " + (knowledgebase.getAxiomCount() - aboxcount)); | 336 | Utility.logDebug("ABox axioms: " + aBoxCount + " TBox axioms: " + (knowledgeBase.getAxiomCount() - aBoxCount)); |
| 339 | // queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); | 337 | // queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); |
| 340 | 338 | ||
| 341 | Timer t = new Timer(); | 339 | Timer t = new Timer(); |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java index 326bf7e..97bab50 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java | |||
| @@ -25,8 +25,8 @@ public abstract class QueryReasoner { | |||
| 25 | private static boolean defaultMultiStages = true; | 25 | private static boolean defaultMultiStages = true; |
| 26 | private static boolean defaultEqualities = true; | 26 | private static boolean defaultEqualities = true; |
| 27 | 27 | ||
| 28 | public static enum Type { Full, RLU, ELHOU }; | 28 | public enum Type { Full, RLU, ELHOU } |
| 29 | 29 | ||
| 30 | public static QueryReasoner getInstance(Properties p) { | 30 | public static QueryReasoner getInstance(Properties p) { |
| 31 | OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath()); | 31 | OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath()); |
| 32 | QueryReasoner pagoda = getInstance(ontology, p); | 32 | QueryReasoner pagoda = getInstance(ontology, p); |
| @@ -63,7 +63,7 @@ public abstract class QueryReasoner { | |||
| 63 | } | 63 | } |
| 64 | 64 | ||
| 65 | public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) { | 65 | public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) { |
| 66 | Utility.initialise(); | 66 | // Utility.initialise(); |
| 67 | QueryReasoner reasoner; | 67 | QueryReasoner reasoner; |
| 68 | if (OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner(); | 68 | if (OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner(); |
| 69 | else if (OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner(); | 69 | else if (OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner(); |
| @@ -218,9 +218,10 @@ public abstract class QueryReasoner { | |||
| 218 | record.outputAnswerStatistics(); | 218 | record.outputAnswerStatistics(); |
| 219 | record.outputTimes(); | 219 | record.outputTimes(); |
| 220 | } | 220 | } |
| 221 | // TODO it can handle one call only | 221 | /* TODO it can handle one call only |
| 222 | // if you call twice, you will end up with a json file with multiple roots | 222 | if you call twice, you will end up with a json file with multiple roots */ |
| 223 | if(answerWriter != null) gson.toJson(queryRecords, answerWriter); | 223 | if(answerWriter != null) gson.toJson(queryRecords, answerWriter); |
| 224 | // queryRecords.stream().forEach(record -> Utility.logDebug(gson.toJson(record))); | ||
| 224 | queryRecords.stream().forEach(record -> record.dispose()); | 225 | queryRecords.stream().forEach(record -> record.dispose()); |
| 225 | } | 226 | } |
| 226 | 227 | ||
| @@ -232,7 +233,7 @@ public abstract class QueryReasoner { | |||
| 232 | e.printStackTrace(); | 233 | e.printStackTrace(); |
| 233 | } | 234 | } |
| 234 | } | 235 | } |
| 235 | Utility.cleanup(); | 236 | // Utility.cleanup(); |
| 236 | } | 237 | } |
| 237 | 238 | ||
| 238 | private QueryManager m_queryManager = new QueryManager(); | 239 | private QueryManager m_queryManager = new QueryManager(); |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java index f70dde9..f068164 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java | |||
| @@ -1,20 +1,22 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.light; | 1 | package uk.ac.ox.cs.pagoda.reasoner.light; |
| 2 | 2 | ||
| 3 | import java.io.File; | 3 | import org.semanticweb.karma2.MyKarma; |
| 4 | import java.io.FileNotFoundException; | ||
| 5 | import java.util.*; | ||
| 6 | |||
| 7 | import org.semanticweb.karma2.*; | ||
| 8 | import org.semanticweb.karma2.clausifier.OntologyProcesser; | 4 | import org.semanticweb.karma2.clausifier.OntologyProcesser; |
| 9 | import org.semanticweb.karma2.exception.IllegalInputOntologyException; | 5 | import org.semanticweb.karma2.exception.IllegalInputOntologyException; |
| 10 | import org.semanticweb.karma2.model.ConjunctiveQuery; | 6 | import org.semanticweb.karma2.model.ConjunctiveQuery; |
| 11 | import org.semanticweb.owlapi.model.OWLOntology; | 7 | import org.semanticweb.owlapi.model.OWLOntology; |
| 12 | |||
| 13 | import uk.ac.ox.cs.pagoda.query.*; | ||
| 14 | import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; | ||
| 15 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 16 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | 8 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; |
| 17 | import uk.ac.ox.cs.JRDFox.store.DataStore; | 9 | import uk.ac.ox.cs.JRDFox.store.DataStore; |
| 10 | import uk.ac.ox.cs.pagoda.query.AnswerTuple; | ||
| 11 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 12 | import uk.ac.ox.cs.pagoda.query.AnswerTuplesImp; | ||
| 13 | import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; | ||
| 14 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 15 | |||
| 16 | import java.io.File; | ||
| 17 | import java.io.FileNotFoundException; | ||
| 18 | import java.nio.file.Paths; | ||
| 19 | import java.util.Set; | ||
| 18 | 20 | ||
| 19 | public class KarmaQueryEngine extends RDFoxQueryEngine { | 21 | public class KarmaQueryEngine extends RDFoxQueryEngine { |
| 20 | 22 | ||
| @@ -29,8 +31,8 @@ public class KarmaQueryEngine extends RDFoxQueryEngine { | |||
| 29 | // int index = (new Random().nextInt() % Base + Base) % Base; | 31 | // int index = (new Random().nextInt() % Base + Base) % Base; |
| 30 | // karmaDataFile = "karma_data" + index + ".ttl"; | 32 | // karmaDataFile = "karma_data" + index + ".ttl"; |
| 31 | // karmaRuleFile = "karma_rule" + index + ".dlog"; | 33 | // karmaRuleFile = "karma_rule" + index + ".dlog"; |
| 32 | karmaDataFile = Utility.TempDirectory + "karma_data.ttl"; | 34 | karmaDataFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_data.ttl").toString(); |
| 33 | karmaRuleFile = Utility.TempDirectory + "karma_rule.dlog"; | 35 | karmaRuleFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_rule.dlog").toString(); |
| 34 | 36 | ||
| 35 | reasoner = new MyKarma(); | 37 | reasoner = new MyKarma(); |
| 36 | } | 38 | } |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java index 70d0cc9..63773d9 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java | |||
| @@ -1,8 +1,9 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.light; | 1 | package uk.ac.ox.cs.pagoda.reasoner.light; |
| 2 | 2 | ||
| 3 | import java.io.File; | 3 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; |
| 4 | import java.util.Collection; | 4 | import uk.ac.ox.cs.JRDFox.Prefixes; |
| 5 | 5 | import uk.ac.ox.cs.JRDFox.store.DataStore; | |
| 6 | import uk.ac.ox.cs.JRDFox.store.DataStore.StoreType; | ||
| 6 | import uk.ac.ox.cs.pagoda.MyPrefixes; | 7 | import uk.ac.ox.cs.pagoda.MyPrefixes; |
| 7 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | 8 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; |
| 8 | import uk.ac.ox.cs.pagoda.reasoner.QueryEngine; | 9 | import uk.ac.ox.cs.pagoda.reasoner.QueryEngine; |
| @@ -10,16 +11,19 @@ import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; | |||
| 10 | import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter; | 11 | import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter; |
| 11 | import uk.ac.ox.cs.pagoda.util.Timer; | 12 | import uk.ac.ox.cs.pagoda.util.Timer; |
| 12 | import uk.ac.ox.cs.pagoda.util.Utility; | 13 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 13 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | 14 | |
| 14 | import uk.ac.ox.cs.JRDFox.Prefixes; | 15 | import java.io.File; |
| 15 | import uk.ac.ox.cs.JRDFox.store.DataStore; | 16 | import java.util.Collection; |
| 16 | import uk.ac.ox.cs.JRDFox.store.DataStore.StoreType; | ||
| 17 | 17 | ||
| 18 | public abstract class RDFoxQueryEngine implements QueryEngine { | 18 | public abstract class RDFoxQueryEngine implements QueryEngine { |
| 19 | 19 | ||
| 20 | public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2; | 20 | public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2; |
| 21 | 21 | ||
| 22 | protected String name; | 22 | public String getName() { |
| 23 | return name; | ||
| 24 | } | ||
| 25 | |||
| 26 | protected String name; | ||
| 23 | protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes(); | 27 | protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes(); |
| 24 | 28 | ||
| 25 | public RDFoxQueryEngine(String name) { | 29 | public RDFoxQueryEngine(String name) { |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java index 232bc31..85f8ef9 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java | |||
| @@ -1,5 +1,8 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.light; | 1 | package uk.ac.ox.cs.pagoda.reasoner.light; |
| 2 | 2 | ||
| 3 | import net.sf.ehcache.Cache; | ||
| 4 | import net.sf.ehcache.CacheManager; | ||
| 5 | import net.sf.ehcache.Element; | ||
| 3 | import org.semanticweb.HermiT.model.*; | 6 | import org.semanticweb.HermiT.model.*; |
| 4 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | 7 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; |
| 5 | import uk.ac.ox.cs.JRDFox.model.Datatype; | 8 | import uk.ac.ox.cs.JRDFox.model.Datatype; |
| @@ -11,11 +14,20 @@ import uk.ac.ox.cs.JRDFox.store.Resource; | |||
| 11 | import uk.ac.ox.cs.pagoda.owl.OWLHelper; | 14 | import uk.ac.ox.cs.pagoda.owl.OWLHelper; |
| 12 | import uk.ac.ox.cs.pagoda.util.Namespace; | 15 | import uk.ac.ox.cs.pagoda.util.Namespace; |
| 13 | 16 | ||
| 14 | import java.util.*; | 17 | import java.util.Collection; |
| 18 | import java.util.HashMap; | ||
| 19 | import java.util.Map; | ||
| 15 | 20 | ||
| 16 | public class RDFoxTripleManager { | 21 | public class RDFoxTripleManager { |
| 17 | 22 | ||
| 18 | UpdateType m_incrementally; | 23 | private final Cache termsCache; |
| 24 | private static final int TERMS_CACHE_SIZE = 10000; | ||
| 25 | private static final int CACHE_TTL_DEFAULT = 0; | ||
| 26 | private static final int CACHE_TTI_DEFAULT = 0; | ||
| 27 | private static final boolean CACHE_ETERNAL = true; | ||
| 28 | private static final boolean CACHE_USE_DISK = false; | ||
| 29 | |||
| 30 | UpdateType m_incrementally; | ||
| 19 | // boolean m_incrementally; | 31 | // boolean m_incrementally; |
| 20 | 32 | ||
| 21 | DataStore m_store; | 33 | DataStore m_store; |
| @@ -24,7 +36,19 @@ public class RDFoxTripleManager { | |||
| 24 | 36 | ||
| 25 | public RDFoxTripleManager(DataStore store, boolean incrementally) { | 37 | public RDFoxTripleManager(DataStore store, boolean incrementally) { |
| 26 | m_store = store; | 38 | m_store = store; |
| 27 | // m_incrementally = incrementally; | 39 | // m_incrementally = incrementally; |
| 40 | |||
| 41 | CacheManager cacheManager = CacheManager.getInstance(); | ||
| 42 | String cacheName = "RDFoxTripleManager_" + store.hashCode(); | ||
| 43 | if(! cacheManager.cacheExists(cacheName)) { | ||
| 44 | termsCache = new Cache(cacheName, | ||
| 45 | TERMS_CACHE_SIZE, CACHE_USE_DISK, CACHE_ETERNAL, | ||
| 46 | CACHE_TTL_DEFAULT, CACHE_TTI_DEFAULT); | ||
| 47 | cacheManager.addCache(termsCache); | ||
| 48 | } | ||
| 49 | else | ||
| 50 | termsCache = cacheManager.getCache(cacheName); | ||
| 51 | |||
| 28 | if (incrementally) | 52 | if (incrementally) |
| 29 | m_incrementally = UpdateType.ScheduleForAddition; | 53 | m_incrementally = UpdateType.ScheduleForAddition; |
| 30 | else | 54 | else |
| @@ -164,29 +188,25 @@ public class RDFoxTripleManager { | |||
| 164 | return m_dict.resolveResources(lexicalForms, types)[0]; | 188 | return m_dict.resolveResources(lexicalForms, types)[0]; |
| 165 | } | 189 | } |
| 166 | 190 | ||
| 167 | Map<Term, Integer> termCache = new HashMap<Term, Integer>(); | 191 | // Map<Term, Integer> termCache = new HashMap<Term, Integer>(); |
| 168 | Queue<Term> termList = new LinkedList<Term>(); | 192 | // Queue<Term> termQueue = new LinkedList<Term>(); |
| 169 | int sizeLimit = 10000; | ||
| 170 | 193 | ||
| 171 | private int getResourceID(Term arg, Map<Variable, Integer> assignment) { | 194 | private int getResourceID(Term arg, Map<Variable, Integer> assignment) { |
| 172 | // FIXME infinite loop | ||
| 173 | // while (termCache.size() > sizeLimit) | ||
| 174 | // termCache.remove(termList.poll()); | ||
| 175 | |||
| 176 | if (arg instanceof Variable) return assignment.get(arg); | 195 | if (arg instanceof Variable) return assignment.get(arg); |
| 177 | Integer id = null; | 196 | int id = -1; |
| 178 | if ((id = termCache.get(arg)) != null) | 197 | if(termsCache.isKeyInCache(arg)) |
| 179 | return id; | 198 | return ((int) termsCache.get(arg).getObjectValue()); |
| 180 | 199 | ||
| 181 | // if (arg instanceof Individual) { | 200 | // if (arg instanceof Individual) { |
| 182 | try { | 201 | try { |
| 183 | if (arg instanceof Individual) | 202 | if (arg instanceof Individual) |
| 184 | termCache.put(arg, id = resolveResource(((Individual) arg).getIRI(), Datatype.IRI_REFERENCE.value())); | 203 | termsCache.put(new Element(arg, id = resolveResource(((Individual) arg).getIRI(), Datatype.IRI_REFERENCE.value()))); |
| 185 | else if (arg instanceof Constant) | 204 | else if (arg instanceof Constant) |
| 186 | termCache.put(arg, id = resolveResource(((Constant) arg).getLexicalForm(), getDatatypeID(((Constant) arg).getDatatypeURI()))); | 205 | termsCache.put(new Element(arg, id = resolveResource(((Constant) arg).getLexicalForm(), getDatatypeID(((Constant) arg).getDatatypeURI())))); |
| 187 | 206 | ||
| 188 | } catch (JRDFStoreException e) { | 207 | } catch (JRDFStoreException e) { |
| 189 | e.printStackTrace(); | 208 | e.printStackTrace(); |
| 209 | System.exit(1); | ||
| 190 | } | 210 | } |
| 191 | // } | 211 | // } |
| 192 | 212 | ||
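The change above replaces the hand-rolled `HashMap`/`Queue` term cache (whose eviction loop carried a FIXME about an infinite loop) with an Ehcache cache created per store. Below is a minimal, self-contained sketch of the same memoisation pattern against the Ehcache 2.x API; the cache name, size and the `resolve()` placeholder are illustrative only and not part of the commit.

```java
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;

// Sketch of the Ehcache-backed memoisation used above (Ehcache 2.x API).
// The commit names its cache "RDFoxTripleManager_" + store.hashCode(); here the name is a parameter.
public class TermIdCacheSketch {

    private final Cache cache;

    public TermIdCacheSketch(String cacheName, int maxEntries) {
        CacheManager manager = CacheManager.getInstance();
        if (!manager.cacheExists(cacheName)) {
            // name, maxElementsInMemory, overflowToDisk, eternal, timeToLiveSeconds, timeToIdleSeconds
            manager.addCache(new Cache(cacheName, maxEntries, false, true, 0, 0));
        }
        cache = manager.getCache(cacheName);
    }

    /** Returns the cached id for the term, computing and caching it on a miss. */
    public int getId(String term) {
        Element hit = cache.get(term);
        if (hit != null) return (Integer) hit.getObjectValue();
        int id = resolve(term);                 // stands in for the RDFox dictionary lookup
        cache.put(new Element(term, id));
        return id;
    }

    private int resolve(String term) {
        return term.hashCode();                 // placeholder; the real code calls resolveResources(...)
    }
}
```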
diff --git a/src/uk/ac/ox/cs/pagoda/rules/Program.java b/src/uk/ac/ox/cs/pagoda/rules/Program.java index 83cd21a..2e5302b 100644 --- a/src/uk/ac/ox/cs/pagoda/rules/Program.java +++ b/src/uk/ac/ox/cs/pagoda/rules/Program.java | |||
| @@ -1,39 +1,10 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.rules; | 1 | package uk.ac.ox.cs.pagoda.rules; |
| 2 | 2 | ||
| 3 | import java.io.BufferedWriter; | ||
| 4 | import java.io.File; | ||
| 5 | import java.io.FileNotFoundException; | ||
| 6 | import java.io.FileOutputStream; | ||
| 7 | import java.io.IOException; | ||
| 8 | import java.io.OutputStreamWriter; | ||
| 9 | import java.util.Collection; | ||
| 10 | import java.util.HashMap; | ||
| 11 | import java.util.HashSet; | ||
| 12 | import java.util.Iterator; | ||
| 13 | import java.util.LinkedList; | ||
| 14 | import java.util.List; | ||
| 15 | import java.util.Map; | ||
| 16 | import java.util.Set; | ||
| 17 | |||
| 18 | import org.semanticweb.HermiT.Configuration; | 3 | import org.semanticweb.HermiT.Configuration; |
| 19 | import org.semanticweb.HermiT.model.AnnotatedEquality; | 4 | import org.semanticweb.HermiT.model.*; |
| 20 | import org.semanticweb.HermiT.model.Atom; | ||
| 21 | import org.semanticweb.HermiT.model.AtomicConcept; | ||
| 22 | import org.semanticweb.HermiT.model.AtomicDataRange; | ||
| 23 | import org.semanticweb.HermiT.model.AtomicNegationDataRange; | ||
| 24 | import org.semanticweb.HermiT.model.AtomicRole; | ||
| 25 | import org.semanticweb.HermiT.model.ConstantEnumeration; | ||
| 26 | import org.semanticweb.HermiT.model.DLClause; | ||
| 27 | import org.semanticweb.HermiT.model.DLOntology; | ||
| 28 | import org.semanticweb.HermiT.model.DLPredicate; | ||
| 29 | import org.semanticweb.HermiT.model.Equality; | ||
| 30 | import org.semanticweb.HermiT.model.Inequality; | ||
| 31 | import org.semanticweb.HermiT.model.InverseRole; | ||
| 32 | import org.semanticweb.HermiT.model.Term; | ||
| 33 | import org.semanticweb.HermiT.model.Variable; | ||
| 34 | import org.semanticweb.HermiT.structural.OWLClausification; | 5 | import org.semanticweb.HermiT.structural.OWLClausification; |
| 35 | import org.semanticweb.owlapi.model.*; | 6 | import org.semanticweb.owlapi.model.*; |
| 36 | 7 | import org.semanticweb.simpleETL.SimpleETL; | |
| 37 | import uk.ac.ox.cs.pagoda.MyPrefixes; | 8 | import uk.ac.ox.cs.pagoda.MyPrefixes; |
| 38 | import uk.ac.ox.cs.pagoda.approx.KnowledgeBase; | 9 | import uk.ac.ox.cs.pagoda.approx.KnowledgeBase; |
| 39 | import uk.ac.ox.cs.pagoda.approx.RLPlusOntology; | 10 | import uk.ac.ox.cs.pagoda.approx.RLPlusOntology; |
| @@ -44,7 +15,8 @@ import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; | |||
| 44 | import uk.ac.ox.cs.pagoda.owl.OWLHelper; | 15 | import uk.ac.ox.cs.pagoda.owl.OWLHelper; |
| 45 | import uk.ac.ox.cs.pagoda.util.Utility; | 16 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 46 | 17 | ||
| 47 | import org.semanticweb.simpleETL.SimpleETL; | 18 | import java.io.*; |
| 19 | import java.util.*; | ||
| 48 | 20 | ||
| 49 | public abstract class Program implements KnowledgeBase { | 21 | public abstract class Program implements KnowledgeBase { |
| 50 | 22 | ||
| @@ -377,7 +349,7 @@ public abstract class Program implements KnowledgeBase { | |||
| 377 | } | 349 | } |
| 378 | 350 | ||
| 379 | public final String getDirectory() { | 351 | public final String getDirectory() { |
| 380 | return Utility.TempDirectory; | 352 | return Utility.getGlobalTempDirAbsolutePath(); |
| 381 | } | 353 | } |
| 382 | 354 | ||
| 383 | public void deleteABoxTurtleFile() { | 355 | public void deleteABoxTurtleFile() { |
diff --git a/src/uk/ac/ox/cs/pagoda/util/Utility.java b/src/uk/ac/ox/cs/pagoda/util/Utility.java index b3a4df1..7b23e0d 100644 --- a/src/uk/ac/ox/cs/pagoda/util/Utility.java +++ b/src/uk/ac/ox/cs/pagoda/util/Utility.java | |||
| @@ -5,9 +5,9 @@ import org.apache.log4j.Logger; | |||
| 5 | import org.semanticweb.HermiT.model.Atom; | 5 | import org.semanticweb.HermiT.model.Atom; |
| 6 | 6 | ||
| 7 | import java.io.*; | 7 | import java.io.*; |
| 8 | import java.nio.file.Files; | ||
| 9 | import java.nio.file.Path; | ||
| 8 | import java.text.SimpleDateFormat; | 10 | import java.text.SimpleDateFormat; |
| 9 | import java.time.LocalDateTime; | ||
| 10 | import java.time.format.DateTimeFormatter; | ||
| 11 | import java.util.*; | 11 | import java.util.*; |
| 12 | 12 | ||
| 13 | public class Utility { | 13 | public class Utility { |
| @@ -21,8 +21,9 @@ public class Utility { | |||
| 21 | public static final String JAVA_FILE_SEPARATOR = "/"; | 21 | public static final String JAVA_FILE_SEPARATOR = "/"; |
| 22 | public static final String FILE_SEPARATOR = System.getProperty("file.separator"); | 22 | public static final String FILE_SEPARATOR = System.getProperty("file.separator"); |
| 23 | public static final String LINE_SEPARATOR = System.getProperty("line.separator"); | 23 | public static final String LINE_SEPARATOR = System.getProperty("line.separator"); |
| 24 | 24 | ||
| 25 | public static final String TempDirectory = (new File("tmp" + DateTimeFormatter.ISO_LOCAL_DATE_TIME.format(LocalDateTime.now()))).getAbsolutePath() + FILE_SEPARATOR; | 25 | private static final String TEMP_DIR_PATH= "pagoda_tmp"; |
| 26 | private static String tempDir; | ||
| 26 | 27 | ||
| 27 | public static final int TEST = -1; | 28 | public static final int TEST = -1; |
| 28 | public static final int FLY = 0; | 29 | public static final int FLY = 0; |
| @@ -31,6 +32,20 @@ public class Utility { | |||
| 31 | public static final int AEO = 3; | 32 | public static final int AEO = 3; |
| 32 | public static final int WINE = 4; | 33 | public static final int WINE = 4; |
| 33 | 34 | ||
| 35 | public static String getGlobalTempDirAbsolutePath() { | ||
| 36 | if(tempDir == null) { | ||
| 37 | try { | ||
| 38 | Path path = Files.createTempDirectory(TEMP_DIR_PATH); | ||
| 39 | tempDir = path.toString(); | ||
| 40 | new File(tempDir).deleteOnExit(); | ||
| 41 | } catch (IOException e) { | ||
| 42 | e.printStackTrace(); | ||
| 43 | System.exit(1); | ||
| 44 | } | ||
| 45 | } | ||
| 46 | return tempDir; | ||
| 47 | } | ||
| 48 | |||
| 34 | public static Set<Atom> toSet(Atom[] data) | 49 | public static Set<Atom> toSet(Atom[] data) |
| 35 | { | 50 | { |
| 36 | HashSet<Atom> ret = new HashSet<Atom>(); | 51 | HashSet<Atom> ret = new HashSet<Atom>(); |
| @@ -228,19 +243,19 @@ public class Utility { | |||
| 228 | LOGS.error(getLogMessage(messages)); | 243 | LOGS.error(getLogMessage(messages)); |
| 229 | } | 244 | } |
| 230 | 245 | ||
| 231 | public static void initialise() { | 246 | // public static void initialise() { |
| 232 | File tmp = new File(TempDirectory); | 247 | // File tmp = new File(TempDirectory); |
| 233 | if (!tmp.exists()) tmp.mkdirs(); | 248 | // if (!tmp.exists()) tmp.mkdirs(); |
| 234 | } | 249 | // } |
| 235 | 250 | // | |
| 236 | public static void cleanup() { | 251 | // public static void cleanup() { |
| 237 | File tmp = new File(TempDirectory); | 252 | // File tmp = new File(TempDirectory); |
| 238 | if (tmp.exists()) { | 253 | // if (tmp.exists()) { |
| 239 | for (File file: tmp.listFiles()) | 254 | // for (File file: tmp.listFiles()) |
| 240 | file.delete(); | 255 | // file.delete(); |
| 241 | tmp.delete(); | 256 | // tmp.delete(); |
| 242 | } | 257 | // } |
| 243 | } | 258 | // } |
| 244 | 259 | ||
| 245 | public static String toFileIRI(String path) { | 260 | public static String toFileIRI(String path) { |
| 246 | String iri; | 261 | String iri; |
