Diffstat (limited to 'src/uk/ac/ox/cs/pagoda/reasoner')
7 files changed, 202 insertions, 195 deletions
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java
index 409a2c9..ef9338a 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java
@@ -30,43 +30,42 @@ public class ConsistencyManager {
     protected MyQueryReasoner m_reasoner;
     protected QueryManager m_queryManager;
 
     Timer t = new Timer();
+    QueryRecord fullQueryRecord;
+    QueryRecord[] botQueryRecords;
+    LinkedList<DLClause> toAddClauses;
+    boolean fragmentExtracted = false;
 
     public ConsistencyManager(MyQueryReasoner reasoner) {
         m_reasoner = reasoner;
         m_queryManager = reasoner.getQueryManager();
     }
 
-    QueryRecord fullQueryRecord;
-    QueryRecord[] botQueryRecords;
-
-    LinkedList<DLClause> toAddClauses;
-
     boolean checkRLLowerBound() {
         fullQueryRecord = m_queryManager.create(QueryRecord.botQueryText, 0);
         AnswerTuples iter = null;
 
         try {
             iter = m_reasoner.rlLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables());
             fullQueryRecord.updateLowerBoundAnswers(iter);
         } finally {
             iter.dispose();
         }
 
         if (fullQueryRecord.getNoOfSoundAnswers() > 0) {
             Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple());
             return false;
         }
         return true;
     }
 
     boolean checkELLowerBound() {
         fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()));
         if (fullQueryRecord.getNoOfSoundAnswers() > 0) {
             Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple());
             return true;
         }
         return true;
     }
 
     boolean checkUpper(BasicQueryEngine upperStore) {
@@ -91,65 +90,64 @@ public class ConsistencyManager {
         fullQueryRecord.dispose();
     }
 
+//    protected boolean unsatisfiability(double duration) {
+//        fullQueryRecord.dispose();
+//        Utility.logDebug("The ontology and dataset is unsatisfiable.");
+//        return false;
+//    }
+
+//    protected boolean satisfiability(double duration) {
+//        fullQueryRecord.dispose();
+//        Utility.logDebug("The ontology and dataset is satisfiable.");
+//        return true;
+//    }
+
     boolean check() {
 //        if (!checkRLLowerBound()) return false;
 //        if (!checkELLowerBound()) return false;
 //        if (checkLazyUpper()) return true;
         AnswerTuples iter = null;
 
         try {
-            iter = m_reasoner.trackingStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables());
+            iter =
+                    m_reasoner.trackingStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables());
             fullQueryRecord.updateUpperBoundAnswers(iter);
         } finally {
-            if (iter != null) iter.dispose();
+            if(iter != null) iter.dispose();
         }
 
-        if (fullQueryRecord.getNoOfCompleteAnswers() == 0)
+        if(fullQueryRecord.getNoOfCompleteAnswers() == 0)
             return true;
 
         extractBottomFragment();
 
         try {
             extractAxioms4Full();
-        } catch (OWLOntologyCreationException e) {
+        } catch(OWLOntologyCreationException e) {
             e.printStackTrace();
         }
 //        fullQueryRecord.saveRelevantClause();
 
         boolean satisfiability;
 
         Checker checker;
-        for (QueryRecord r: getQueryRecords()) {
+        for(QueryRecord r : getQueryRecords()) {
             // TODO to be removed ...
 //            r.saveRelevantOntology("bottom" + r.getQueryID() + ".owl");
             checker = new HermitSummaryFilter(r, true); // m_reasoner.factory.getSummarisedReasoner(r);
             satisfiability = checker.isConsistent();
             checker.dispose();
-            if (!satisfiability) return false;
+            if(!satisfiability) return false;
         }
 
 //        Checker checker = m_reasoner.factory.getSummarisedReasoner(fullQueryRecord);
 //        boolean satisfiable = checker.isConsistent();
 //        checker.dispose();
 //        if (!satisfiable) return unsatisfiability(t.duration());
 
         return true;
     }
 
-//    protected boolean unsatisfiability(double duration) {
-//        fullQueryRecord.dispose();
-//        Utility.logDebug("The ontology and dataset is unsatisfiable.");
-//        return false;
-//    }
-
-//    protected boolean satisfiability(double duration) {
-//        fullQueryRecord.dispose();
-//        Utility.logDebug("The ontology and dataset is satisfiable.");
-//        return true;
-//    }
-
-    boolean fragmentExtracted = false;
-
     public void extractBottomFragment() {
         if (fragmentExtracted) return ;
         fragmentExtracted = true;
@@ -179,7 +177,7 @@ public class ConsistencyManager {
         int[] group = new int[number - 1];
         for (int i = 0; i < number - 1; ++i) group[i] = i;
         for (int i = 0; i < number - 1; ++i)
-            if (tempQueryRecords[i].processed()) tempQueryRecords[i].dispose();
+            if(tempQueryRecords[i].isProcessed()) tempQueryRecords[i].dispose();
             else if (group[i] == i) {
                 ++bottomNumber;
                 record = tempQueryRecords[i];
@@ -193,8 +191,8 @@ public class ConsistencyManager {
         int bottomCounter = 0;
         botQueryRecords = new QueryRecord[bottomNumber];
         Variable X = Variable.create("X");
         for (int i = 0; i < number - 1; ++i)
-            if (!tempQueryRecords[i].processed())
+            if(!tempQueryRecords[i].isProcessed())
                 if (group[i] == i) {
                     botQueryRecords[bottomCounter] = record = tempQueryRecords[i];
                     record.resetInfo(QueryRecord.botQueryText.replace("Nothing", "Nothing_final" + (++bottomCounter)), 0, group[i] = bottomCounter);
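Note that the reworked check() above guards disposal of the AnswerTuples with a null check, whereas checkRLLowerBound() still calls iter.dispose() unconditionally in its finally block. A minimal sketch of the null-guarded idiom, purely illustrative: "store" and "record" stand in for any BasicQueryEngine and QueryRecord from this codebase.

    // Illustrative sketch only: the null check keeps the finally block safe
    // even if evaluate() throws before iter is assigned.
    AnswerTuples iter = null;
    try {
        iter = store.evaluate(record.getQueryText(), record.getAnswerVariables());
        record.updateUpperBoundAnswers(iter);
    } finally {
        if (iter != null) iter.dispose();
    }
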
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java
index 34742c8..c74ea58 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java
@@ -68,7 +68,7 @@ class ELHOUQueryReasoner extends QueryReasoner {
         }
         queryRecord.addProcessingTime(Step.UPPER_BOUND, t.duration());
 
-        if (queryRecord.processed()) {
+        if(queryRecord.isProcessed()) {
             queryRecord.setDifficulty(Step.UPPER_BOUND);
             return;
         }
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java b/src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java
index 447a92d..7847e7c 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java
@@ -1,9 +1,6 @@
 package uk.ac.ox.cs.pagoda.reasoner;
 
-import java.io.File;
-
 import org.semanticweb.owlapi.model.OWLOntology;
-
 import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
 import uk.ac.ox.cs.pagoda.constraints.UpperUnaryBottom;
 import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
@@ -14,6 +11,8 @@ import uk.ac.ox.cs.pagoda.rules.GeneralProgram;
 import uk.ac.ox.cs.pagoda.tracking.QueryTracker;
 import uk.ac.ox.cs.pagoda.util.Utility;
 
+import java.io.File;
+
 public class IterativeRefinement {
 
     private static final int depthLimit = 1;
@@ -23,16 +22,15 @@ public class IterativeRefinement {
     BasicQueryEngine m_trackingStore;
     QueryRecord[] botQueryRecords;
 
     int m_depth = 0;
-
+    String tempDataFile = "temp.ttl";
+
     public IterativeRefinement(QueryRecord queryRecord, QueryTracker tracker, BasicQueryEngine trackingStore, QueryRecord[] botQueryRecords) {
         m_record = queryRecord;
         m_tracker = tracker;
         m_trackingStore = trackingStore;
         this.botQueryRecords = botQueryRecords;
     }
-
-    String tempDataFile = "temp.ttl";
 
     public OWLOntology extractWithFullABox(String dataset, BottomStrategy upperBottom) {
         GeneralProgram program;
@@ -58,8 +56,8 @@ public class IterativeRefinement {
         } finally {
             tEngine.dispose();
         }
 
-        if (m_record.processed())
+        if(m_record.isProcessed())
             return null;
 
         if (!update) break;
@@ -95,8 +93,8 @@ public class IterativeRefinement {
         } finally {
             tEngine.dispose();
         }
 
-        if (m_record.processed())
+        if(m_record.isProcessed())
             return null;
 
         if (!update) break;
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
index cc0e647..b5b9534 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
@@ -23,7 +23,6 @@ import uk.ac.ox.cs.pagoda.util.Utility;
 import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
 
 import java.util.Collection;
-import java.util.HashMap;
 
 class MyQueryReasoner extends QueryReasoner {
 
@@ -33,7 +32,7 @@ class MyQueryReasoner extends QueryReasoner {
     // String additonalDataFile;
     BasicQueryEngine rlLowerStore = null;
     BasicQueryEngine lazyUpperStore = null;
-    BasicQueryEngine limitedSkolemUpperStore;
+    // BasicQueryEngine limitedSkolemUpperStore;
     OWLOntology elho_ontology;
     // boolean[] namedIndividuals_lazyUpper;
     KarmaQueryEngine elLowerStore = null;
@@ -99,7 +98,7 @@ class MyQueryReasoner extends QueryReasoner {
         useUpperStores = multiStageTag && !program.getGeneral().isHorn();
         if(useUpperStores) {
             lazyUpperStore = getUpperStore("lazy-upper-bound", true);
-            limitedSkolemUpperStore = getUpperStore("limited-skolem-upper-bound", true);
+            // limitedSkolemUpperStore = getUpperStore("limited-skolem-upper-bound", true);
         }
 
         importData(program.getAdditionalDataFile());
@@ -147,20 +146,20 @@ class MyQueryReasoner extends QueryReasoner {
             Utility.logInfo("time for satisfiability checking: " + t.duration());
         }
 
-        if(limitedSkolemUpperStore != null) {
-            limitedSkolemUpperStore.importRDFData(name, datafile);
-            limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram);
-            int tag = limitedSkolemUpperStore.materialiseSkolemly(program, null);
-            if(tag != 1) {
-                limitedSkolemUpperStore.dispose();
-                limitedSkolemUpperStore = null;
-            }
-            if(tag == -1) return false;
-        }
-        if(satisfiable == SatisfiabilityStatus.UNCHECKED && consistency.checkUpper(limitedSkolemUpperStore)) {
-            satisfiable = SatisfiabilityStatus.SATISFIABLE;
-            Utility.logInfo("time for satisfiability checking: " + t.duration());
-        }
+        // if(limitedSkolemUpperStore != null) {
+        //     limitedSkolemUpperStore.importRDFData(name, datafile);
+        //     limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram);
+        //     int tag = limitedSkolemUpperStore.materialiseSkolemly(program, null);
+        //     if(tag != 1) {
+        //         limitedSkolemUpperStore.dispose();
+        //         limitedSkolemUpperStore = null;
+        //     }
+        //     if(tag == -1) return false;
+        // }
+        // if(satisfiable == SatisfiabilityStatus.UNCHECKED && consistency.checkUpper(limitedSkolemUpperStore)) {
+        //     satisfiable = SatisfiabilityStatus.SATISFIABLE;
+        //     Utility.logInfo("time for satisfiability checking: " + t.duration());
+        // }
 
         trackingStore.importRDFData(name, datafile);
         trackingStore.materialise("saturate named individuals", originalMarkProgram);
@@ -214,7 +213,7 @@ class MyQueryReasoner extends QueryReasoner {
         queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables());
 
         queryRecord.addProcessingTime(step, t.duration());
-        if(queryRecord.processed()) {
+        if(queryRecord.isProcessed()) {
             queryRecord.setDifficulty(step);
             return true;
         }
@@ -224,7 +223,7 @@ class MyQueryReasoner extends QueryReasoner {
     /**
      * Returns the part of the ontology relevant for Hermit, while computing the bound answers.
      * */
-    private OWLOntology relevantPart(QueryRecord queryRecord) {
+    private boolean queryBounds(QueryRecord queryRecord) {
         AnswerTuples rlAnswer = null, elAnswer = null;
 
         t.reset();
@@ -243,15 +242,15 @@ class MyQueryReasoner extends QueryReasoner {
 
         Utility.logDebug("Tracking store");
         if(queryUpperStore(trackingStore, queryRecord, extendedQueryTexts, Step.SIMPLE_UPPER_BOUND))
-            return null;
+            return true;
 
         if(!queryRecord.isBottom()) {
             Utility.logDebug("Lazy store");
             if(lazyUpperStore != null && queryUpperStore(lazyUpperStore, queryRecord, extendedQueryTexts, Step.LAZY_UPPER_BOUND))
-                return null;
-            Utility.logDebug("Skolem store");
-            if(limitedSkolemUpperStore != null && queryUpperStore(limitedSkolemUpperStore, queryRecord, extendedQueryTexts, Step.L_SKOLEM_UPPER_BOUND))
-                return null;
+                return true;
+            // Utility.logDebug("Skolem store");
+            // if(limitedSkolemUpperStore != null && queryUpperStore(limitedSkolemUpperStore, queryRecord, extendedQueryTexts, Step.L_SKOLEM_UPPER_BOUND))
+            //     return null;
         }
 
         t.reset();
@@ -266,52 +265,23 @@ class MyQueryReasoner extends QueryReasoner {
         }
         queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration());
 
-        if (queryRecord.processed()) {
+        if(queryRecord.isProcessed()) {
             queryRecord.setDifficulty(Step.EL_LOWER_BOUND);
-            return null;
+            return true;
         }
 
+        return false;
+    }
+
+    private OWLOntology extractRelevantOntologySubset(QueryRecord queryRecord) {
         t.reset();
 
         QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord);
+        OWLOntology relevantOntologySubset = tracker.extract(trackingStore, consistency.getQueryRecords(), true);
 
-        OWLOntology knowledgeBase;
-        t.reset();
-//        if (program.getGeneral().isHorn()) {
-//            knowledgebase = tracker.extract(lazyUpperStore, consistency.getQueryRecords(), true);
-//            queryRecord.addProcessingTime(Step.Fragment, t.duration());
-//            return knowledgebase;
-//        }
-//        else {
-        knowledgeBase = tracker.extract(trackingStore, consistency.getQueryRecords(), true);
         queryRecord.addProcessingTime(Step.FRAGMENT, t.duration());
-//        }
-
-        if(knowledgeBase.isEmpty() || queryRecord.isBottom())
-            return knowledgeBase;
-
-        if(program.getGeneral().isHorn()) return knowledgeBase;
 
-//        t.reset();
-//        if (queryRecord.isHorn() && lazyUpperStore != null) {
-////            knowledgebase = tracker.extract(lazyUpperStore, consistency.getQueryRecords(), true);
-//        } else if (queryRecord.getArity() < 3) {
-//            IterativeRefinement iterativeRefinement = new IterativeRefinement(queryRecord, tracker, trackingStore, consistency.getQueryRecords());
-//            knowledgebase = iterativeRefinement.extractWithFullABox(importedData.toString(), program.getUpperBottomStrategy());
-//        }
-//
-//        queryRecord.addProcessingTime(Step.FRAGMENT_REFINEMENT, t.duration());
-//
-//        if (knowledgebase == null)
-//            queryRecord.setDifficulty(Step.FRAGMENT_REFINEMENT);
-
-        return knowledgeBase;
-    }
-
-    private String toJsonKeyValuePair(String key, Object value) {
-        HashMap<String, Object> map = new HashMap<>();
-        map.put(key, value);
-        return QueryRecord.GsonCreator.getInstance().toJson(map);
+        return relevantOntologySubset;
     }
 
     private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) {
@@ -323,35 +293,59 @@ class MyQueryReasoner extends QueryReasoner {
             queryRecord.updateUpperBoundAnswers(rlAnswer);
         } finally {
             if(rlAnswer != null) rlAnswer.dispose();
-            rlAnswer = null;
         }
     }
 
-    // int counter = 0;
-
     @Override
     public void evaluate(QueryRecord queryRecord) {
-        OWLOntology knowledgeBase = relevantPart(queryRecord);
-
-        if(knowledgeBase == null) {
-            Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
+        if(queryBounds(queryRecord))
             return;
-        }
 
-        int aBoxCount = knowledgeBase.getABoxAxioms(true).size();
-        Utility.logDebug("ABox axioms: " + aBoxCount + " TBox axioms: " + (knowledgeBase.getAxiomCount() - aBoxCount));
+        OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord);
+
+        int aBoxCount = relevantOntologySubset.getABoxAxioms(true).size();
+        Utility.logInfo("Relevant ontology subset: ABox_axioms=" + aBoxCount + " TBox_axioms=" + (relevantOntologySubset
+                .getAxiomCount() - aBoxCount));
 //        queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl");
 
+        if(querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord))
+            return;
+
         Timer t = new Timer();
         Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT());
-        // int validNumber =
         summarisedChecker.check(queryRecord.getGapAnswers());
         summarisedChecker.dispose();
         Utility.logDebug("Total time for full reasoner: " + t.duration());
-        // if (validNumber == 0) {
         queryRecord.markAsProcessed();
         Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
-        // }
+    }
+
+    private boolean querySkolemisedRelevantSubset(OWLOntology relevantSubset, QueryRecord queryRecord) {
+        MultiStageQueryEngine relevantStore =
+                new MultiStageQueryEngine("Relevant-store", true); // checkValidity is true
+        DatalogProgram relevantProgram = new DatalogProgram(relevantSubset, false); // toClassify is false
+
+        // relevantStore.importRDFData("data", importedData.toString()); // 2 answers more
+        relevantStore.importDataFromABoxOf(relevantSubset);
+
+        int materialisationResult = relevantStore.materialiseSkolemly(relevantProgram, null);
+        if(materialisationResult != 1)
+            throw new RuntimeException("Skolemised materialisation error"); // TODO check consistency
+        // relevantStore.materialiseRestrictedly(relevantProgram, null); // it has been tried
+
+        return queryUpperStore(relevantStore, queryRecord, queryRecord.getExtendedQueryText(), Step.L_SKOLEM_UPPER_BOUND);
+
+        // the following has been tried
+        // Tuple<String> extendedQueryText = queryRecord.getExtendedQueryText();
+        // if(queryRecord.hasNonAnsDistinguishedVariables()) {
+        //     queryUpperBound(relevantStore, queryRecord, extendedQueryText.get(0), queryRecord.getAnswerVariables());
+        //     queryUpperBound(relevantStore, queryRecord, extendedQueryText.get(1), queryRecord.getDistinguishedVariables());
+        // }
+        // else
+        //     queryUpperBound(relevantStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables());
+        //
+        // return queryRecord.isProcessed();
+
     }
 
     @Override
@@ -375,7 +369,8 @@
         if (lazyUpperStore != null) lazyUpperStore.dispose();
         if (elLowerStore != null) elLowerStore.dispose();
         if (trackingStore != null) trackingStore.dispose();
-        if(limitedSkolemUpperStore != null) limitedSkolemUpperStore.dispose();
+
+        // if(limitedSkolemUpperStore != null) limitedSkolemUpperStore.dispose();
         super.dispose();
     }
 
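Taken together, the MyQueryReasoner hunks replace the single relevantPart(...) method with a staged pipeline: queryBounds(...) computes the lower and upper bounds, extractRelevantOntologySubset(...) tracks the relevant fragment, querySkolemisedRelevantSubset(...) materialises that fragment skolemly as a tighter upper bound, and only the remaining gap answers reach HermiT. A condensed, illustrative sketch of that control flow, using only names visible in this diff (timing and logging omitted, not the verbatim method body):

    // Outline of the new evaluate(...) flow after this change.
    public void evaluate(QueryRecord queryRecord) {
        if (queryBounds(queryRecord))                            // bounds already coincide
            return;

        OWLOntology subset = extractRelevantOntologySubset(queryRecord);

        if (querySkolemisedRelevantSubset(subset, queryRecord))  // skolemised upper bound closes the gap
            return;

        Checker checker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT());
        checker.check(queryRecord.getGapAnswers());              // full reasoning only on gap answers
        checker.dispose();
        queryRecord.markAsProcessed();
    }
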
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
index 64945e8..118c1b2 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
@@ -147,7 +147,7 @@ PagodaProperties properties;
         if (forFacetGeneration) {
             QueryRecord record = m_queryManager.create(queryText);
             Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText);
-            if(!record.processed())
+            if(!record.isProcessed())
                 evaluateUpper(record);
 //            AnswerTuples tuples = record.getUpperBoundAnswers();
 //            for (AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) {
@@ -167,7 +167,7 @@ PagodaProperties properties;
     public AnswerTuples evaluate(String queryText) {
         QueryRecord record = m_queryManager.create(queryText);
         Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText);
-        if(!record.processed())
+        if(!record.isProcessed())
             evaluate(record);
         AnswerTuples answer = record.getAnswers();
         record.dispose();
@@ -178,7 +178,7 @@ PagodaProperties properties;
     public void evaluate_shell(String queryText) {
         QueryRecord record = m_queryManager.create(queryText);
         Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText);
-        if(!record.processed())
+        if(!record.isProcessed())
             evaluate(record);
         Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple());
         record.dispose();
@@ -206,12 +206,12 @@ PagodaProperties properties;
 //            if (Integer.parseInt(record.getQueryID()) != 218) continue;
             Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------",
                     record.getQueryText());
-            if (!record.processed()) {
+            if(!record.isProcessed()) {
                 t.reset();
-                if (!record.processed())
+                if(!record.isProcessed())
                     evaluate(record);
                 Utility.logInfo("Total time to answer this query: " + t.duration());
-                if (!fullReasoner && !record.processed()) {
+                if(!fullReasoner && !record.isProcessed()) {
                     Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds.");
                     continue;
                 }
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java
index bdef436..547140a 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java
@@ -53,8 +53,8 @@ class RLUQueryReasoner extends QueryReasoner {
             if (ans != null) ans.dispose();
         }
         queryRecord.addProcessingTime(Step.UPPER_BOUND, t.duration());
 
-        if (queryRecord.processed())
+        if(queryRecord.isProcessed())
             queryRecord.setDifficulty(Step.UPPER_BOUND);
     }
 
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
index 63773d9..61500f5 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
@@ -1,5 +1,7 @@
 package uk.ac.ox.cs.pagoda.reasoner.light;
 
+import org.semanticweb.owlapi.model.OWLOntology;
+import org.semanticweb.owlapi.model.OWLOntologyCreationException;
 import uk.ac.ox.cs.JRDFox.JRDFStoreException;
 import uk.ac.ox.cs.JRDFox.Prefixes;
 import uk.ac.ox.cs.JRDFox.store.DataStore;
@@ -18,32 +20,44 @@ import java.util.Collection;
 public abstract class RDFoxQueryEngine implements QueryEngine {
 
     public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2;
-
-    public String getName() {
-        return name;
-    }
-
     protected String name;
     protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes();
 
     public RDFoxQueryEngine(String name) {
         this.name = name;
     }
 
-    public abstract DataStore getDataStore();
+    public static DataStore createDataStore() {
+        DataStore instance = null;
+        try {
+            // instance = new DataStore("par-head-n");
+            instance = new DataStore(StoreType.NarrowParallelHead);
+            instance.setNumberOfThreads(matNoOfThreads);
+            instance.initialize();
+        } catch(JRDFStoreException e) {
+            e.printStackTrace();
+        }
+        return instance;
+    }
 
-    public abstract void dispose();
+    public String getName() {
+        return name;
+    }
 
+    public abstract DataStore getDataStore();
+
+    public abstract void dispose();
+
     public void importRDFData(String fileName, String importedFile) {
-        if (importedFile == null || importedFile.isEmpty()) return ;
+        if(importedFile == null || importedFile.isEmpty()) return;
         Timer t = new Timer();
         DataStore store = getDataStore();
         try {
             long oldTripleCount = store.getTriplesCount(), tripleCount;
             for (String file: importedFile.split(QueryReasoner.ImportDataFileSeparator)) {
                 store.importTurtleFile(new File(file), prefixes);
             }
             tripleCount = store.getTriplesCount();
             Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)");
             store.clearRulesAndMakeFactsExplicit();
         } catch (JRDFStoreException e) {
@@ -51,17 +65,32 @@ public abstract class RDFoxQueryEngine implements QueryEngine {
         }
         Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds.");
     }
 
+    public void importDataFromABoxOf(OWLOntology ontology) {
+        DataStore store = getDataStore();
+        try {
+            long prevTriplesCount = store.getTriplesCount();
+            store.importOntology(ontology.getOWLOntologyManager().createOntology(ontology.getABoxAxioms(true)));
+            long loadedTriples = store.getTriplesCount() - prevTriplesCount;
+            Utility.logInfo(name + ": loaded " + loadedTriples + " triples from " + ontology.getABoxAxioms(true)
+                    .size() + " ABox axioms");
+        } catch(JRDFStoreException | OWLOntologyCreationException e) {
+            e.printStackTrace();
+            System.exit(1);
+        }
+
+    }
+
     public void materialise(String programName, String programText) {
-        if (programText == null) return ;
+        if(programText == null) return;
         Timer t = new Timer();
         DataStore store = getDataStore();
         try {
             long oldTripleCount = store.getTriplesCount(), tripleCount;
             // store.addRules(new String[] {programText});
             store.importRules(programText);
             store.applyReasoning();
             tripleCount = store.getTriplesCount();
             Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)");
             store.clearRulesAndMakeFactsExplicit();
         } catch (JRDFStoreException e) {
@@ -74,17 +103,17 @@ public abstract class RDFoxQueryEngine implements QueryEngine {
     public void evaluate(Collection<String> queryTexts, String answerFile) {
         if (queryTexts == null)
             return ;
 
         int queryID = 0;
         AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile);
         AnswerTuples answerTuples;
         Timer t = new Timer();
         try {
             for (String query: queryTexts) {
                 t.reset();
                 answerTuples = null;
                 try {
                     answerTuples = evaluate(query);
                     Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration());
                     answerWriter.write(answerTuples.getAnswerVariables(), answerTuples);
                 } finally {
@@ -94,22 +123,9 @@ public abstract class RDFoxQueryEngine implements QueryEngine {
         } finally {
             answerWriter.close();
         }
 
         Utility.logDebug("done computing query answers by RDFox.");
 
-    }
-
-    public static DataStore createDataStore() {
-        DataStore instance = null;
-        try {
-            // instance = new DataStore("par-head-n");
-            instance = new DataStore(StoreType.NarrowParallelHead);
-            instance.setNumberOfThreads(matNoOfThreads);
-            instance.initialize();
-        } catch (JRDFStoreException e) {
-            e.printStackTrace();
-        }
-        return instance;
     }
 
 }
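The RDFoxQueryEngine hunks move createDataStore() ahead of the abstract members and add importDataFromABoxOf(OWLOntology), which loads only the ABox axioms of an ontology into the underlying RDFox store. A hypothetical caller-side sketch, assuming some concrete RDFoxQueryEngine subclass instance ("engine"), an OWLOntology already loaded via the OWL API ("ontology"), and an illustrative data file and rule text:

    // Illustrative only; "engine", "ontology", "data.ttl" and "datalogRules" are assumptions.
    DataStore store = RDFoxQueryEngine.createDataStore(); // NarrowParallelHead store, 2x availableProcessors() threads

    engine.importRDFData("abox", "data.ttl");             // bulk Turtle import, as before
    engine.importDataFromABoxOf(ontology);                // new: import just the ABox axioms
    engine.materialise("program", datalogRules);          // then apply the datalog rules over the loaded data
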
