diff options
Diffstat (limited to 'src/uk/ac/ox/cs/pagoda/reasoner')
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java | 275 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java | 160 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java | 352 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java | 196 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java | 662 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java | 18 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java | 436 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java | 10 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java | 13 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java | 720 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java | 155 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java | 44 | ||||
| -rw-r--r-- | src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java | 219 |
13 files changed, 1663 insertions, 1597 deletions
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java index ef9338a..b4a1775 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java | |||
| @@ -22,10 +22,11 @@ import uk.ac.ox.cs.pagoda.tracking.QueryTracker; | |||
| 22 | import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder; | 22 | import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder; |
| 23 | import uk.ac.ox.cs.pagoda.util.Timer; | 23 | import uk.ac.ox.cs.pagoda.util.Timer; |
| 24 | import uk.ac.ox.cs.pagoda.util.Utility; | 24 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 25 | import uk.ac.ox.cs.pagoda.util.disposable.Disposable; | ||
| 25 | 26 | ||
| 26 | import java.util.LinkedList; | 27 | import java.util.LinkedList; |
| 27 | 28 | ||
| 28 | public class ConsistencyManager { | 29 | public class ConsistencyManager extends Disposable { |
| 29 | 30 | ||
| 30 | protected MyQueryReasoner m_reasoner; | 31 | protected MyQueryReasoner m_reasoner; |
| 31 | protected QueryManager m_queryManager; | 32 | protected QueryManager m_queryManager; |
| @@ -40,7 +41,109 @@ public class ConsistencyManager { | |||
| 40 | m_reasoner = reasoner; | 41 | m_reasoner = reasoner; |
| 41 | m_queryManager = reasoner.getQueryManager(); | 42 | m_queryManager = reasoner.getQueryManager(); |
| 42 | } | 43 | } |
| 43 | 44 | ||
| 45 | @Override | ||
| 46 | public void dispose() { | ||
| 47 | super.dispose(); | ||
| 48 | fullQueryRecord.dispose(); | ||
| 49 | } | ||
| 50 | |||
| 51 | public void extractBottomFragment() { | ||
| 52 | if(fragmentExtracted) return; | ||
| 53 | fragmentExtracted = true; | ||
| 54 | |||
| 55 | UpperDatalogProgram upperProgram = m_reasoner.program.getUpper(); | ||
| 56 | int number = upperProgram.getBottomNumber(); | ||
| 57 | |||
| 58 | if(number <= 1) { | ||
| 59 | botQueryRecords = new QueryRecord[]{fullQueryRecord}; | ||
| 60 | } | ||
| 61 | else { | ||
| 62 | QueryRecord[] tempQueryRecords = new QueryRecord[number - 1]; | ||
| 63 | QueryRecord record; | ||
| 64 | for(int i = 0; i < number - 1; ++i) { | ||
| 65 | tempQueryRecords[i] = record = | ||
| 66 | m_queryManager.create(QueryRecord.botQueryText.replace("Nothing", "Nothing" + (i + 1)), 0, i + 1); | ||
| 67 | AnswerTuples iter = null; | ||
| 68 | try { | ||
| 69 | iter = m_reasoner.trackingStore.evaluate(record.getQueryText(), record.getAnswerVariables()); | ||
| 70 | record.updateUpperBoundAnswers(iter); | ||
| 71 | } finally { | ||
| 72 | if(iter != null) iter.dispose(); | ||
| 73 | iter = null; | ||
| 74 | } | ||
| 75 | } | ||
| 76 | |||
| 77 | int bottomNumber = 0; | ||
| 78 | int[] group = new int[number - 1]; | ||
| 79 | for(int i = 0; i < number - 1; ++i) group[i] = i; | ||
| 80 | for(int i = 0; i < number - 1; ++i) | ||
| 81 | if(tempQueryRecords[i].isProcessed()) tempQueryRecords[i].dispose(); | ||
| 82 | else if(group[i] == i) { | ||
| 83 | ++bottomNumber; | ||
| 84 | record = tempQueryRecords[i]; | ||
| 85 | for(int j = i + 1; j < number - 1; ++j) | ||
| 86 | if(record.hasSameGapAnswers(tempQueryRecords[j])) | ||
| 87 | group[j] = i; | ||
| 88 | } | ||
| 89 | |||
| 90 | Utility.logInfo("There are " + bottomNumber + " different bottom fragments."); | ||
| 91 | toAddClauses = new LinkedList<DLClause>(); | ||
| 92 | int bottomCounter = 0; | ||
| 93 | botQueryRecords = new QueryRecord[bottomNumber]; | ||
| 94 | Variable X = Variable.create("X"); | ||
| 95 | for(int i = 0; i < number - 1; ++i) | ||
| 96 | if(!tempQueryRecords[i].isDisposed() && !tempQueryRecords[i].isProcessed()) | ||
| 97 | if(group[i] == i) { | ||
| 98 | botQueryRecords[bottomCounter] = record = tempQueryRecords[i]; | ||
| 99 | record.resetInfo(QueryRecord.botQueryText.replace("Nothing", "Nothing_final" + (++bottomCounter)), 0, | ||
| 100 | group[i] = bottomCounter); | ||
| 101 | toAddClauses.add( | ||
| 102 | DLClause.create( | ||
| 103 | new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + bottomCounter), X)}, | ||
| 104 | new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)})); | ||
| 105 | } | ||
| 106 | else { | ||
| 107 | toAddClauses.add( | ||
| 108 | DLClause.create( | ||
| 109 | new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + group[group[i]]), X)}, | ||
| 110 | new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)})); | ||
| 111 | tempQueryRecords[i].dispose(); | ||
| 112 | } | ||
| 113 | |||
| 114 | upperProgram.updateDependencyGraph(toAddClauses); | ||
| 115 | } | ||
| 116 | |||
| 117 | String[] programs = collectTrackingProgramAndImport(); | ||
| 118 | if(programs.length == 0) | ||
| 119 | return; | ||
| 120 | |||
| 121 | DataStore store = m_reasoner.trackingStore.getDataStore(); | ||
| 122 | long oldTripleCount, tripleCount; | ||
| 123 | try { | ||
| 124 | Timer t1 = new Timer(); | ||
| 125 | oldTripleCount = store.getTriplesCount(); | ||
| 126 | for(String program : programs) | ||
| 127 | store.importRules(program, UpdateType.ScheduleForAddition); | ||
| 128 | store.applyReasoning(true); | ||
| 129 | tripleCount = store.getTriplesCount(); | ||
| 130 | |||
| 131 | Utility.logInfo("tracking store after materialising tracking program: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)", | ||
| 132 | "tracking store finished the materialisation of tracking program in " + t1.duration() + " seconds."); | ||
| 133 | |||
| 134 | extractAxioms(); | ||
| 135 | store.clearRulesAndMakeFactsExplicit(); | ||
| 136 | } catch(JRDFStoreException e) { | ||
| 137 | e.printStackTrace(); | ||
| 138 | } catch(OWLOntologyCreationException e) { | ||
| 139 | e.printStackTrace(); | ||
| 140 | } | ||
| 141 | } | ||
| 142 | |||
| 143 | public QueryRecord[] getQueryRecords() { | ||
| 144 | return botQueryRecords; | ||
| 145 | } | ||
| 146 | |||
| 44 | boolean checkRLLowerBound() { | 147 | boolean checkRLLowerBound() { |
| 45 | fullQueryRecord = m_queryManager.create(QueryRecord.botQueryText, 0); | 148 | fullQueryRecord = m_queryManager.create(QueryRecord.botQueryText, 0); |
| 46 | AnswerTuples iter = null; | 149 | AnswerTuples iter = null; |
| @@ -59,9 +162,22 @@ public class ConsistencyManager { | |||
| 59 | return true; | 162 | return true; |
| 60 | } | 163 | } |
| 61 | 164 | ||
| 165 | // protected boolean unsatisfiability(double duration) { | ||
| 166 | // fullQueryRecord.dispose(); | ||
| 167 | // Utility.logDebug("The ontology and dataset is unsatisfiable."); | ||
| 168 | // return false; | ||
| 169 | // } | ||
| 170 | |||
| 171 | // protected boolean satisfiability(double duration) { | ||
| 172 | // fullQueryRecord.dispose(); | ||
| 173 | // Utility.logDebug("The ontology and dataset is satisfiable."); | ||
| 174 | // return true; | ||
| 175 | // } | ||
| 176 | |||
| 62 | boolean checkELLowerBound() { | 177 | boolean checkELLowerBound() { |
| 63 | fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables())); | 178 | fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord |
| 64 | if (fullQueryRecord.getNoOfSoundAnswers() > 0) { | 179 | .getAnswerVariables())); |
| 180 | if(fullQueryRecord.getNoOfSoundAnswers() > 0) { | ||
| 65 | Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple()); | 181 | Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple()); |
| 66 | return true; | 182 | return true; |
| 67 | } | 183 | } |
| @@ -69,39 +185,22 @@ public class ConsistencyManager { | |||
| 69 | } | 185 | } |
| 70 | 186 | ||
| 71 | boolean checkUpper(BasicQueryEngine upperStore) { | 187 | boolean checkUpper(BasicQueryEngine upperStore) { |
| 72 | if (upperStore != null) { | 188 | if(upperStore != null) { |
| 73 | AnswerTuples tuples = null; | 189 | AnswerTuples tuples = null; |
| 74 | try { | 190 | try { |
| 75 | tuples = upperStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()); | 191 | tuples = upperStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()); |
| 76 | if (!tuples.isValid()) { | 192 | if(!tuples.isValid()) { |
| 77 | Utility.logInfo("There are no contradictions derived in "+ upperStore.getName() +" materialisation."); | 193 | Utility.logInfo("There are no contradictions derived in " + upperStore.getName() + " materialisation."); |
| 78 | Utility.logDebug("The ontology and dataset is satisfiable."); | 194 | Utility.logDebug("The ontology and dataset is satisfiable."); |
| 79 | return true; | 195 | return true; |
| 80 | } | 196 | } |
| 81 | } | 197 | } finally { |
| 82 | finally { | 198 | if(tuples != null) tuples.dispose(); |
| 83 | if (tuples != null) tuples.dispose(); | ||
| 84 | } | 199 | } |
| 85 | } | 200 | } |
| 86 | return false; | 201 | return false; |
| 87 | } | 202 | } |
| 88 | 203 | ||
| 89 | void dispose() { | ||
| 90 | fullQueryRecord.dispose(); | ||
| 91 | } | ||
| 92 | |||
| 93 | // protected boolean unsatisfiability(double duration) { | ||
| 94 | // fullQueryRecord.dispose(); | ||
| 95 | // Utility.logDebug("The ontology and dataset is unsatisfiable."); | ||
| 96 | // return false; | ||
| 97 | // } | ||
| 98 | |||
| 99 | // protected boolean satisfiability(double duration) { | ||
| 100 | // fullQueryRecord.dispose(); | ||
| 101 | // Utility.logDebug("The ontology and dataset is satisfiable."); | ||
| 102 | // return true; | ||
| 103 | // } | ||
| 104 | |||
| 105 | boolean check() { | 204 | boolean check() { |
| 106 | // if (!checkRLLowerBound()) return false; | 205 | // if (!checkRLLowerBound()) return false; |
| 107 | // if (!checkELLowerBound()) return false; | 206 | // if (!checkELLowerBound()) return false; |
| @@ -148,148 +247,54 @@ public class ConsistencyManager { | |||
| 148 | return true; | 247 | return true; |
| 149 | } | 248 | } |
| 150 | 249 | ||
| 151 | public void extractBottomFragment() { | ||
| 152 | if (fragmentExtracted) return ; | ||
| 153 | fragmentExtracted = true; | ||
| 154 | |||
| 155 | UpperDatalogProgram upperProgram = m_reasoner.program.getUpper(); | ||
| 156 | int number = upperProgram.getBottomNumber(); | ||
| 157 | |||
| 158 | if (number <= 1) { | ||
| 159 | botQueryRecords = new QueryRecord[] { fullQueryRecord }; | ||
| 160 | } | ||
| 161 | else { | ||
| 162 | QueryRecord[] tempQueryRecords = new QueryRecord[number - 1]; | ||
| 163 | QueryRecord record; | ||
| 164 | for (int i = 0; i < number - 1; ++i) { | ||
| 165 | tempQueryRecords[i] = record = m_queryManager.create(QueryRecord.botQueryText.replace("Nothing", "Nothing" + (i + 1)), 0, i + 1); | ||
| 166 | AnswerTuples iter = null; | ||
| 167 | try { | ||
| 168 | iter = m_reasoner.trackingStore.evaluate(record.getQueryText(), record.getAnswerVariables()); | ||
| 169 | record.updateUpperBoundAnswers(iter); | ||
| 170 | } finally { | ||
| 171 | if (iter != null) iter.dispose(); | ||
| 172 | iter = null; | ||
| 173 | } | ||
| 174 | } | ||
| 175 | |||
| 176 | int bottomNumber = 0; | ||
| 177 | int[] group = new int[number - 1]; | ||
| 178 | for (int i = 0; i < number - 1; ++i) group[i] = i; | ||
| 179 | for (int i = 0; i < number - 1; ++i) | ||
| 180 | if(tempQueryRecords[i].isProcessed()) tempQueryRecords[i].dispose(); | ||
| 181 | else if (group[i] == i) { | ||
| 182 | ++bottomNumber; | ||
| 183 | record = tempQueryRecords[i]; | ||
| 184 | for (int j = i + 1; j < number - 1; ++j) | ||
| 185 | if (record.hasSameGapAnswers(tempQueryRecords[j])) | ||
| 186 | group[j] = i; | ||
| 187 | } | ||
| 188 | |||
| 189 | Utility.logInfo("There are " + bottomNumber + " different bottom fragments."); | ||
| 190 | toAddClauses = new LinkedList<DLClause>(); | ||
| 191 | int bottomCounter = 0; | ||
| 192 | botQueryRecords = new QueryRecord[bottomNumber]; | ||
| 193 | Variable X = Variable.create("X"); | ||
| 194 | for (int i = 0; i < number - 1; ++i) | ||
| 195 | if(!tempQueryRecords[i].isProcessed()) | ||
| 196 | if (group[i] == i) { | ||
| 197 | botQueryRecords[bottomCounter] = record = tempQueryRecords[i]; | ||
| 198 | record.resetInfo(QueryRecord.botQueryText.replace("Nothing", "Nothing_final" + (++bottomCounter)), 0, group[i] = bottomCounter); | ||
| 199 | toAddClauses.add( | ||
| 200 | DLClause.create( | ||
| 201 | new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + bottomCounter), X)}, | ||
| 202 | new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)})); | ||
| 203 | } | ||
| 204 | else { | ||
| 205 | toAddClauses.add( | ||
| 206 | DLClause.create( | ||
| 207 | new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + group[group[i]]), X)}, | ||
| 208 | new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)})); | ||
| 209 | tempQueryRecords[i].dispose(); | ||
| 210 | } | ||
| 211 | |||
| 212 | upperProgram.updateDependencyGraph(toAddClauses); | ||
| 213 | } | ||
| 214 | |||
| 215 | String[] programs = collectTrackingProgramAndImport(); | ||
| 216 | if (programs.length == 0) | ||
| 217 | return ; | ||
| 218 | |||
| 219 | DataStore store = m_reasoner.trackingStore.getDataStore(); | ||
| 220 | long oldTripleCount, tripleCount; | ||
| 221 | try { | ||
| 222 | Timer t1 = new Timer(); | ||
| 223 | oldTripleCount = store.getTriplesCount(); | ||
| 224 | for (String program: programs) | ||
| 225 | store.importRules(program, UpdateType.ScheduleForAddition); | ||
| 226 | store.applyReasoning(true); | ||
| 227 | tripleCount = store.getTriplesCount(); | ||
| 228 | |||
| 229 | Utility.logInfo("tracking store after materialising tracking program: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)", | ||
| 230 | "tracking store finished the materialisation of tracking program in " + t1.duration() + " seconds."); | ||
| 231 | |||
| 232 | extractAxioms(); | ||
| 233 | store.clearRulesAndMakeFactsExplicit(); | ||
| 234 | } catch (JRDFStoreException e) { | ||
| 235 | e.printStackTrace(); | ||
| 236 | } catch (OWLOntologyCreationException e) { | ||
| 237 | e.printStackTrace(); | ||
| 238 | } | ||
| 239 | } | ||
| 240 | |||
| 241 | private void extractAxioms4Full() throws OWLOntologyCreationException { | 250 | private void extractAxioms4Full() throws OWLOntologyCreationException { |
| 242 | OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager(); | 251 | OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager(); |
| 243 | OWLOntology fullOntology = manager.createOntology(); | 252 | OWLOntology fullOntology = manager.createOntology(); |
| 244 | for (QueryRecord record: botQueryRecords) { | 253 | for (QueryRecord record: botQueryRecords) { |
| 245 | for (DLClause clause: record.getRelevantClauses()) { | 254 | for (DLClause clause: record.getRelevantClauses()) { |
| 246 | fullQueryRecord.addRelevantClauses(clause); | 255 | fullQueryRecord.addRelevantClauses(clause); |
| 247 | } | 256 | } |
| 248 | manager.addAxioms(fullOntology, record.getRelevantOntology().getAxioms()); | 257 | manager.addAxioms(fullOntology, record.getRelevantOntology().getAxioms()); |
| 249 | } | 258 | } |
| 250 | fullQueryRecord.setRelevantOntology(fullOntology); | 259 | fullQueryRecord.setRelevantOntology(fullOntology); |
| 251 | } | 260 | } |
| 252 | 261 | ||
| 253 | private void extractAxioms() throws OWLOntologyCreationException { | 262 | private void extractAxioms() throws OWLOntologyCreationException { |
| 254 | OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager(); | 263 | OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager(); |
| 255 | for (QueryRecord record: botQueryRecords) { | 264 | for (QueryRecord record: botQueryRecords) { |
| 256 | record.setRelevantOntology(manager.createOntology()); | 265 | record.setRelevantOntology(manager.createOntology()); |
| 257 | QueryTracker tracker = new QueryTracker(m_reasoner.encoder, m_reasoner.rlLowerStore, record); | 266 | QueryTracker tracker = new QueryTracker(m_reasoner.encoder, m_reasoner.rlLowerStore, record); |
| 258 | m_reasoner.encoder.setCurrentQuery(record); | 267 | m_reasoner.encoder.setCurrentQuery(record); |
| 259 | tracker.extractAxioms(m_reasoner.trackingStore); | 268 | tracker.extractAxioms(m_reasoner.trackingStore); |
| 260 | // record.saveRelevantClause(); | 269 | // record.saveRelevantClause(); |
| 261 | // record.saveRelevantOntology("bottom" + record.getQueryID() + ".owl"); | 270 | // record.saveRelevantOntology("bottom" + record.getQueryID() + ".owl"); |
| 262 | Utility.logInfo("finish extracting axioms for bottom " + record.getQueryID()); | 271 | Utility.logInfo("finish extracting axioms for bottom " + record.getQueryID()); |
| 263 | } | 272 | } |
| 264 | } | 273 | } |
| 265 | 274 | ||
| 266 | private String[] collectTrackingProgramAndImport() { | 275 | private String[] collectTrackingProgramAndImport() { |
| 267 | String[] programs = new String[botQueryRecords.length]; | 276 | String[] programs = new String[botQueryRecords.length]; |
| 268 | TrackingRuleEncoder encoder = m_reasoner.encoder; | 277 | TrackingRuleEncoder encoder = m_reasoner.encoder; |
| 269 | 278 | ||
| 270 | StringBuilder builder; | 279 | StringBuilder builder; |
| 271 | LinkedList<DLClause> currentClauses = new LinkedList<DLClause>(); | 280 | LinkedList<DLClause> currentClauses = new LinkedList<DLClause>(); |
| 272 | 281 | ||
| 273 | for (int i = 0; i < botQueryRecords.length; ++i) { | 282 | for (int i = 0; i < botQueryRecords.length; ++i) { |
| 274 | encoder.setCurrentQuery(botQueryRecords[i]); | 283 | encoder.setCurrentQuery(botQueryRecords[i]); |
| 275 | builder = new StringBuilder(encoder.getTrackingProgram()); | 284 | builder = new StringBuilder(encoder.getTrackingProgram()); |
| 276 | // encoder.saveTrackingRules("tracking_bottom" + (i + 1) + ".dlog"); | 285 | // encoder.saveTrackingRules("tracking_bottom" + (i + 1) + ".dlog"); |
| 277 | 286 | ||
| 278 | for (DLClause clause: toAddClauses) | 287 | for (DLClause clause: toAddClauses) |
| 279 | if (clause.getHeadAtom(0).getDLPredicate().toString().contains("_final" + (i + 1))) | 288 | if (clause.getHeadAtom(0).getDLPredicate().toString().contains("_final" + (i + 1))) |
| 280 | currentClauses.add(clause); | 289 | currentClauses.add(clause); |
| 281 | 290 | ||
| 282 | builder.append(DLClauseHelper.toString(currentClauses)); | 291 | builder.append(DLClauseHelper.toString(currentClauses)); |
| 283 | programs[i] = builder.toString(); | 292 | programs[i] = builder.toString(); |
| 284 | 293 | ||
| 285 | currentClauses.clear(); | 294 | currentClauses.clear(); |
| 286 | } | 295 | } |
| 287 | |||
| 288 | return programs; | ||
| 289 | } | ||
| 290 | 296 | ||
| 291 | public QueryRecord[] getQueryRecords() { | 297 | return programs; |
| 292 | return botQueryRecords; | ||
| 293 | } | 298 | } |
| 294 | 299 | ||
| 295 | 300 | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java index f5a8093..0a151bc 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java | |||
| @@ -10,86 +10,92 @@ import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine; | |||
| 10 | import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; | 10 | import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; |
| 11 | import uk.ac.ox.cs.pagoda.util.Timer; | 11 | import uk.ac.ox.cs.pagoda.util.Timer; |
| 12 | import uk.ac.ox.cs.pagoda.util.Utility; | 12 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 13 | import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; | ||
| 13 | 14 | ||
| 14 | class ELHOQueryReasoner extends QueryReasoner { | 15 | class ELHOQueryReasoner extends QueryReasoner { |
| 15 | 16 | ||
| 16 | LowerDatalogProgram program; | 17 | LowerDatalogProgram program; |
| 17 | 18 | ||
| 18 | OWLOntology elho_ontology; | 19 | OWLOntology elho_ontology; |
| 19 | KarmaQueryEngine elLowerStore = null; | 20 | KarmaQueryEngine elLowerStore = null; |
| 20 | 21 | ||
| 21 | private Timer t = new Timer(); | 22 | private Timer t = new Timer(); |
| 22 | 23 | ||
| 23 | public ELHOQueryReasoner() { | 24 | public ELHOQueryReasoner() { |
| 24 | elLowerStore = new KarmaQueryEngine("el"); | 25 | elLowerStore = new KarmaQueryEngine("el"); |
| 25 | } | 26 | } |
| 26 | 27 | ||
| 27 | @Override | 28 | @Override |
| 28 | public void evaluate(QueryRecord queryRecord) { | 29 | public void evaluate(QueryRecord queryRecord) { |
| 29 | AnswerTuples elAnswer = null; | 30 | if(isDisposed()) throw new DisposedException(); |
| 30 | t.reset(); | 31 | AnswerTuples elAnswer = null; |
| 31 | try { | 32 | t.reset(); |
| 32 | elAnswer = elLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | 33 | try { |
| 33 | queryRecord.updateLowerBoundAnswers(elAnswer); | 34 | elAnswer = elLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); |
| 34 | } finally { | 35 | queryRecord.updateLowerBoundAnswers(elAnswer); |
| 35 | if (elAnswer != null) elAnswer.dispose(); | 36 | } finally { |
| 36 | } | 37 | if(elAnswer != null) elAnswer.dispose(); |
| 37 | queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); | 38 | } |
| 38 | 39 | queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); | |
| 39 | queryRecord.setDifficulty(Step.EL_LOWER_BOUND); | 40 | |
| 40 | queryRecord.markAsProcessed(); | 41 | queryRecord.setDifficulty(Step.EL_LOWER_BOUND); |
| 41 | } | 42 | queryRecord.markAsProcessed(); |
| 42 | 43 | } | |
| 43 | @Override | 44 | |
| 44 | public void evaluateUpper(QueryRecord queryRecord) { | 45 | @Override |
| 45 | evaluate(queryRecord); | 46 | public void evaluateUpper(QueryRecord queryRecord) { |
| 46 | } | 47 | if(isDisposed()) throw new DisposedException(); |
| 47 | 48 | evaluate(queryRecord); | |
| 48 | @Override | 49 | } |
| 49 | public void dispose() { | 50 | |
| 50 | if (elLowerStore != null) elLowerStore.dispose(); | 51 | @Override |
| 51 | super.dispose(); | 52 | public void dispose() { |
| 52 | } | 53 | super.dispose(); |
| 53 | 54 | if(elLowerStore != null) elLowerStore.dispose(); | |
| 54 | @Override | 55 | } |
| 55 | public void loadOntology(OWLOntology ontology) { | 56 | |
| 56 | program = new LowerDatalogProgram(properties.getToClassify()); | 57 | @Override |
| 57 | program.load(ontology, new UnaryBottom()); | 58 | public void loadOntology(OWLOntology ontology) { |
| 58 | program.transform(); | 59 | if(isDisposed()) throw new DisposedException(); |
| 59 | 60 | program = new LowerDatalogProgram(properties.getToClassify()); | |
| 60 | importData(program.getAdditionalDataFile()); | 61 | program.load(ontology, new UnaryBottom()); |
| 61 | 62 | program.transform(); | |
| 62 | elho_ontology = new ELHOProfile().getFragment(ontology); | 63 | |
| 63 | elLowerStore.processOntology(elho_ontology); | 64 | importData(program.getAdditionalDataFile()); |
| 64 | } | 65 | |
| 65 | 66 | elho_ontology = new ELHOProfile().getFragment(ontology); | |
| 66 | @Override | 67 | elLowerStore.processOntology(elho_ontology); |
| 67 | public boolean preprocess() { | 68 | } |
| 68 | elLowerStore.importRDFData("data", importedData.toString()); | 69 | |
| 69 | String rlLowerProgramText = program.toString(); | 70 | @Override |
| 71 | public boolean preprocess() { | ||
| 72 | if(isDisposed()) throw new DisposedException(); | ||
| 73 | elLowerStore.importRDFData("data", importedData.toString()); | ||
| 74 | String rlLowerProgramText = program.toString(); | ||
| 70 | // program.save(); | 75 | // program.save(); |
| 71 | elLowerStore.materialise("lower program", rlLowerProgramText); | 76 | elLowerStore.materialise("lower program", rlLowerProgramText); |
| 72 | elLowerStore.initialiseKarma(); | 77 | elLowerStore.initialiseKarma(); |
| 73 | 78 | ||
| 74 | if (!isConsistent()) { | 79 | if(!isConsistent()) { |
| 75 | Utility.logDebug("The dataset is not consistent with the ontology."); | 80 | Utility.logDebug("The dataset is not consistent with the ontology."); |
| 76 | return false; | 81 | return false; |
| 77 | } | 82 | } |
| 78 | return true; | 83 | return true; |
| 79 | } | 84 | } |
| 80 | 85 | ||
| 81 | @Override | 86 | @Override |
| 82 | public boolean isConsistent() { | 87 | public boolean isConsistent() { |
| 83 | String[] X = new String[] {"X"}; | 88 | if(isDisposed()) throw new DisposedException(); |
| 84 | AnswerTuples ans = null; | 89 | String[] X = new String[]{"X"}; |
| 85 | try { | 90 | AnswerTuples ans = null; |
| 86 | ans = elLowerStore.evaluate(QueryRecord.botQueryText, X); | 91 | try { |
| 87 | if (ans.isValid()) return false; | 92 | ans = elLowerStore.evaluate(QueryRecord.botQueryText, X); |
| 88 | } finally { | 93 | if(ans.isValid()) return false; |
| 89 | if (ans != null) ans.dispose(); | 94 | } finally { |
| 90 | } | 95 | if(ans != null) ans.dispose(); |
| 91 | 96 | } | |
| 92 | return true; | 97 | |
| 93 | } | 98 | return true; |
| 99 | } | ||
| 94 | 100 | ||
| 95 | } | 101 | } |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java index c74ea58..771190e 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java | |||
| @@ -13,181 +13,187 @@ import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine; | |||
| 13 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; | 13 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; |
| 14 | import uk.ac.ox.cs.pagoda.util.Timer; | 14 | import uk.ac.ox.cs.pagoda.util.Timer; |
| 15 | import uk.ac.ox.cs.pagoda.util.Utility; | 15 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 16 | import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; | ||
| 16 | 17 | ||
| 17 | class ELHOUQueryReasoner extends QueryReasoner { | 18 | class ELHOUQueryReasoner extends QueryReasoner { |
| 18 | 19 | ||
| 19 | DatalogProgram program; | 20 | DatalogProgram program; |
| 20 | 21 | ||
| 21 | BasicQueryEngine rlLowerStore; | 22 | BasicQueryEngine rlLowerStore; |
| 22 | BasicQueryEngine rlUpperStore; | 23 | BasicQueryEngine rlUpperStore; |
| 23 | 24 | ||
| 24 | OWLOntology elho_ontology; | 25 | OWLOntology elho_ontology; |
| 25 | KarmaQueryEngine elLowerStore = null; | 26 | KarmaQueryEngine elLowerStore = null; |
| 26 | 27 | ||
| 27 | boolean multiStageTag, equalityTag; | 28 | boolean multiStageTag, equalityTag; |
| 28 | String originalMarkProgram; | 29 | String originalMarkProgram; |
| 29 | private Timer t = new Timer(); | 30 | private Timer t = new Timer(); |
| 30 | 31 | ||
| 31 | public ELHOUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { | 32 | public ELHOUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { |
| 32 | this.multiStageTag = multiStageTag; | 33 | this.multiStageTag = multiStageTag; |
| 33 | this.equalityTag = considerEqualities; | 34 | this.equalityTag = considerEqualities; |
| 34 | rlLowerStore = new BasicQueryEngine("rl-lower-bound"); | 35 | rlLowerStore = new BasicQueryEngine("rl-lower-bound"); |
| 35 | elLowerStore = new KarmaQueryEngine("el-lower-bound"); | 36 | elLowerStore = new KarmaQueryEngine("el-lower-bound"); |
| 36 | 37 | ||
| 37 | if(!multiStageTag) | 38 | if(!multiStageTag) |
| 38 | rlUpperStore = new BasicQueryEngine("rl-upper-bound"); | 39 | rlUpperStore = new BasicQueryEngine("rl-upper-bound"); |
| 39 | else | 40 | else |
| 40 | rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); | 41 | rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); |
| 41 | } | 42 | } |
| 42 | 43 | ||
| 43 | @Override | 44 | @Override |
| 44 | public void evaluate(QueryRecord queryRecord) { | 45 | public void evaluate(QueryRecord queryRecord) { |
| 45 | AnswerTuples rlAnswer = null; | 46 | if(isDisposed()) throw new DisposedException(); |
| 46 | t.reset(); | 47 | AnswerTuples rlAnswer = null; |
| 47 | try { | 48 | t.reset(); |
| 48 | rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | 49 | try { |
| 49 | queryRecord.updateLowerBoundAnswers(rlAnswer); | 50 | rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); |
| 50 | } finally { | 51 | queryRecord.updateLowerBoundAnswers(rlAnswer); |
| 51 | if(rlAnswer != null) rlAnswer.dispose(); | 52 | } finally { |
| 52 | } | 53 | if(rlAnswer != null) rlAnswer.dispose(); |
| 53 | queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration()); | 54 | } |
| 54 | 55 | queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration()); | |
| 55 | String extendedQueryText = queryRecord.getExtendedQueryText().get(0); | 56 | |
| 56 | String[] toQuery = queryRecord.getQueryText().equals(extendedQueryText) ? | 57 | String extendedQueryText = queryRecord.getExtendedQueryText().get(0); |
| 57 | new String[]{queryRecord.getQueryText()} : | 58 | String[] toQuery = queryRecord.getQueryText().equals(extendedQueryText) ? |
| 58 | new String[] {queryRecord.getQueryText(), extendedQueryText}; | 59 | new String[]{queryRecord.getQueryText()} : |
| 59 | 60 | new String[]{queryRecord.getQueryText(), extendedQueryText}; | |
| 60 | for (String queryText: toQuery) { | 61 | |
| 61 | rlAnswer = null; | 62 | for(String queryText : toQuery) { |
| 62 | t.reset(); | 63 | rlAnswer = null; |
| 63 | try { | 64 | t.reset(); |
| 64 | rlAnswer = rlUpperStore.evaluate(queryText, queryRecord.getAnswerVariables()); | 65 | try { |
| 65 | queryRecord.updateUpperBoundAnswers(rlAnswer); | 66 | rlAnswer = rlUpperStore.evaluate(queryText, queryRecord.getAnswerVariables()); |
| 66 | } finally { | 67 | queryRecord.updateUpperBoundAnswers(rlAnswer); |
| 67 | if(rlAnswer != null) rlAnswer.dispose(); | 68 | } finally { |
| 68 | } | 69 | if(rlAnswer != null) rlAnswer.dispose(); |
| 69 | queryRecord.addProcessingTime(Step.UPPER_BOUND, t.duration()); | 70 | } |
| 70 | 71 | queryRecord.addProcessingTime(Step.UPPER_BOUND, t.duration()); | |
| 71 | if(queryRecord.isProcessed()) { | 72 | |
| 72 | queryRecord.setDifficulty(Step.UPPER_BOUND); | 73 | if(queryRecord.isProcessed()) { |
| 73 | return; | 74 | queryRecord.setDifficulty(Step.UPPER_BOUND); |
| 74 | } | 75 | return; |
| 75 | } | 76 | } |
| 76 | 77 | } | |
| 77 | AnswerTuples elAnswer = null; | 78 | |
| 78 | t.reset(); | 79 | AnswerTuples elAnswer = null; |
| 79 | try { | 80 | t.reset(); |
| 80 | elAnswer = | 81 | try { |
| 81 | elLowerStore.evaluate(extendedQueryText, queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers()); | 82 | elAnswer = |
| 82 | queryRecord.updateLowerBoundAnswers(elAnswer); | 83 | elLowerStore.evaluate(extendedQueryText, queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers()); |
| 83 | } finally { | 84 | queryRecord.updateLowerBoundAnswers(elAnswer); |
| 84 | if (elAnswer != null) elAnswer.dispose(); | 85 | } finally { |
| 85 | } | 86 | if(elAnswer != null) elAnswer.dispose(); |
| 86 | queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); | 87 | } |
| 87 | } | 88 | queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); |
| 88 | 89 | } | |
| 89 | @Override | 90 | |
| 90 | public void evaluateUpper(QueryRecord queryRecord) { | 91 | @Override |
| 91 | AnswerTuples rlAnswer = null; | 92 | public void evaluateUpper(QueryRecord queryRecord) { |
| 92 | try { | 93 | if(isDisposed()) throw new DisposedException(); |
| 93 | rlAnswer = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | 94 | AnswerTuples rlAnswer = null; |
| 94 | queryRecord.updateUpperBoundAnswers(rlAnswer, true); | 95 | try { |
| 95 | } finally { | 96 | rlAnswer = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); |
| 96 | if(rlAnswer != null) rlAnswer.dispose(); | 97 | queryRecord.updateUpperBoundAnswers(rlAnswer, true); |
| 97 | } | 98 | } finally { |
| 98 | } | 99 | if(rlAnswer != null) rlAnswer.dispose(); |
| 99 | 100 | } | |
| 100 | @Override | 101 | } |
| 101 | public void dispose() { | 102 | |
| 102 | if (elLowerStore != null) elLowerStore.dispose(); | 103 | @Override |
| 103 | if(rlUpperStore != null) rlUpperStore.dispose(); | 104 | public void dispose() { |
| 104 | super.dispose(); | 105 | super.dispose(); |
| 105 | } | 106 | if(elLowerStore != null) elLowerStore.dispose(); |
| 106 | 107 | if(rlUpperStore != null) rlUpperStore.dispose(); | |
| 107 | @Override | 108 | } |
| 108 | public void loadOntology(OWLOntology o) { | 109 | |
| 109 | if (!equalityTag) { | 110 | @Override |
| 110 | EqualitiesEliminator eliminator = new EqualitiesEliminator(o); | 111 | public void loadOntology(OWLOntology o) { |
| 111 | o = eliminator.getOutputOntology(); | 112 | if(isDisposed()) throw new DisposedException(); |
| 112 | eliminator.save(); | 113 | if(!equalityTag) { |
| 113 | } | 114 | EqualitiesEliminator eliminator = new EqualitiesEliminator(o); |
| 114 | 115 | o = eliminator.getOutputOntology(); | |
| 115 | OWLOntology ontology = o; | 116 | eliminator.save(); |
| 116 | program = new DatalogProgram(ontology, properties.getToClassify()); | 117 | } |
| 117 | 118 | ||
| 118 | importData(program.getAdditionalDataFile()); | 119 | OWLOntology ontology = o; |
| 119 | 120 | program = new DatalogProgram(ontology, properties.getToClassify()); | |
| 120 | elho_ontology = new ELHOProfile().getFragment(ontology); | 121 | |
| 121 | elLowerStore.processOntology(elho_ontology); | 122 | importData(program.getAdditionalDataFile()); |
| 122 | originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); | 123 | |
| 123 | } | 124 | elho_ontology = new ELHOProfile().getFragment(ontology); |
| 124 | 125 | elLowerStore.processOntology(elho_ontology); | |
| 125 | @Override | 126 | originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); |
| 126 | public boolean preprocess() { | 127 | } |
| 127 | String name = "data", datafile = importedData.toString(); | 128 | |
| 128 | 129 | @Override | |
| 129 | String lowername = "lower program"; | 130 | public boolean preprocess() { |
| 130 | String rlLowerProgramText = program.getLower().toString(); | 131 | if(isDisposed()) throw new DisposedException(); |
| 131 | 132 | String name = "data", datafile = importedData.toString(); | |
| 132 | rlUpperStore.importRDFData(name, datafile); | 133 | |
| 133 | rlUpperStore.materialise("saturate named individuals", originalMarkProgram); | 134 | String lowername = "lower program"; |
| 134 | 135 | String rlLowerProgramText = program.getLower().toString(); | |
| 135 | int flag = rlUpperStore.materialiseRestrictedly(program, null); | 136 | |
| 136 | if (flag != 1) { | 137 | rlUpperStore.importRDFData(name, datafile); |
| 137 | if (flag == -1) return false; | 138 | rlUpperStore.materialise("saturate named individuals", originalMarkProgram); |
| 138 | rlUpperStore.dispose(); | 139 | |
| 139 | 140 | int flag = rlUpperStore.materialiseRestrictedly(program, null); | |
| 140 | if (!multiStageTag) | 141 | if(flag != 1) { |
| 141 | rlUpperStore = new BasicQueryEngine("rl-upper-bound"); | 142 | if(flag == -1) return false; |
| 142 | else | 143 | rlUpperStore.dispose(); |
| 143 | rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); | 144 | |
| 144 | rlUpperStore.importRDFData(name, datafile); | 145 | if(!multiStageTag) |
| 145 | rlUpperStore.materialise("saturate named individuals", originalMarkProgram); | 146 | rlUpperStore = new BasicQueryEngine("rl-upper-bound"); |
| 146 | rlUpperStore.materialiseFoldedly(program, null); | 147 | else |
| 147 | } | 148 | rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); |
| 148 | Utility.logInfo("upper store ready."); | 149 | rlUpperStore.importRDFData(name, datafile); |
| 149 | 150 | rlUpperStore.materialise("saturate named individuals", originalMarkProgram); | |
| 150 | rlLowerStore.importRDFData(name, datafile); | 151 | rlUpperStore.materialiseFoldedly(program, null); |
| 151 | rlLowerStore.materialise(lowername, rlLowerProgramText); | 152 | } |
| 152 | Utility.logInfo("lower store ready."); | 153 | Utility.logInfo("upper store ready."); |
| 153 | 154 | ||
| 154 | elLowerStore.importRDFData(name, datafile); | 155 | rlLowerStore.importRDFData(name, datafile); |
| 155 | elLowerStore.materialise("saturate named individuals", originalMarkProgram); | 156 | rlLowerStore.materialise(lowername, rlLowerProgramText); |
| 156 | elLowerStore.materialise(lowername, rlLowerProgramText); | 157 | Utility.logInfo("lower store ready."); |
| 157 | 158 | ||
| 158 | elLowerStore.initialiseKarma(); | 159 | elLowerStore.importRDFData(name, datafile); |
| 159 | Utility.logInfo("EL lower store ready."); | 160 | elLowerStore.materialise("saturate named individuals", originalMarkProgram); |
| 160 | 161 | elLowerStore.materialise(lowername, rlLowerProgramText); | |
| 161 | if (!isConsistent()) { | 162 | |
| 162 | Utility.logInfo("The dataset is not consistent with the ontology."); | 163 | elLowerStore.initialiseKarma(); |
| 163 | return false; | 164 | Utility.logInfo("EL lower store ready."); |
| 164 | } | 165 | |
| 165 | Utility.logInfo("The dataset is consistent."); | 166 | if(!isConsistent()) { |
| 166 | return true; | 167 | Utility.logInfo("The dataset is not consistent with the ontology."); |
| 167 | } | 168 | return false; |
| 168 | 169 | } | |
| 169 | @Override | 170 | Utility.logInfo("The dataset is consistent."); |
| 170 | public boolean isConsistent() { | 171 | return true; |
| 171 | Utility.logInfo("Start checking consistency... "); | 172 | } |
| 172 | String[] X = new String[] {"X"}; | 173 | |
| 173 | AnswerTuples ans = null; | 174 | @Override |
| 174 | try { | 175 | public boolean isConsistent() { |
| 175 | ans = rlUpperStore.evaluate(QueryRecord.botQueryText, X); | 176 | if(isDisposed()) throw new DisposedException(); |
| 176 | if (!ans.isValid()) return true; | 177 | Utility.logInfo("Start checking consistency... "); |
| 177 | } finally { | 178 | String[] X = new String[]{"X"}; |
| 178 | if (ans != null) ans.dispose(); | 179 | AnswerTuples ans = null; |
| 179 | } | 180 | try { |
| 180 | 181 | ans = rlUpperStore.evaluate(QueryRecord.botQueryText, X); | |
| 181 | ans = null; | 182 | if(!ans.isValid()) return true; |
| 182 | try { | 183 | } finally { |
| 183 | ans = elLowerStore.evaluate(QueryRecord.botQueryText, X); | 184 | if(ans != null) ans.dispose(); |
| 184 | if (ans.isValid()) return false; | 185 | } |
| 185 | } finally { | 186 | |
| 186 | if (ans != null) ans.dispose(); | 187 | ans = null; |
| 187 | } | 188 | try { |
| 188 | 189 | ans = elLowerStore.evaluate(QueryRecord.botQueryText, X); | |
| 189 | Utility.logDebug("The consistency of the data has not been determined yet."); | 190 | if(ans.isValid()) return false; |
| 190 | return true; | 191 | } finally { |
| 191 | } | 192 | if(ans != null) ans.dispose(); |
| 193 | } | ||
| 194 | |||
| 195 | Utility.logDebug("The consistency of the data has not been determined yet."); | ||
| 196 | return true; | ||
| 197 | } | ||
| 192 | 198 | ||
| 193 | } | 199 | } |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java index d1856c9..78b9a0b 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java | |||
| @@ -10,6 +10,7 @@ import uk.ac.ox.cs.pagoda.query.*; | |||
| 10 | import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; | 10 | import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; |
| 11 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; | 11 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; |
| 12 | import uk.ac.ox.cs.pagoda.util.Utility; | 12 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 13 | import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; | ||
| 13 | 14 | ||
| 14 | import java.io.File; | 15 | import java.io.File; |
| 15 | import java.io.IOException; | 16 | import java.io.IOException; |
| @@ -17,100 +18,105 @@ import java.util.HashSet; | |||
| 17 | import java.util.Set; | 18 | import java.util.Set; |
| 18 | 19 | ||
| 19 | class HermiTReasoner extends QueryReasoner { | 20 | class HermiTReasoner extends QueryReasoner { |
| 20 | 21 | ||
| 21 | Reasoner hermit; | 22 | Reasoner hermit; |
| 22 | 23 | ||
| 23 | BasicQueryEngine upperStore = null; | 24 | BasicQueryEngine upperStore = null; |
| 24 | 25 | ||
| 25 | OWLOntology onto; | 26 | OWLOntology onto; |
| 26 | OWLDataFactory factory; | 27 | OWLDataFactory factory; |
| 27 | 28 | ||
| 28 | String importedOntologyPath = null; | 29 | String importedOntologyPath = null; |
| 29 | 30 | ||
| 30 | QueryRoller roller; | 31 | QueryRoller roller; |
| 31 | boolean toCheckSatisfiability; | 32 | boolean toCheckSatisfiability; |
| 32 | 33 | ||
| 33 | public HermiTReasoner(boolean toCheckSatisfiability) { | 34 | public HermiTReasoner(boolean toCheckSatisfiability) { |
| 34 | this.toCheckSatisfiability = toCheckSatisfiability; | 35 | this.toCheckSatisfiability = toCheckSatisfiability; |
| 35 | } | 36 | } |
| 36 | 37 | ||
| 37 | @Override | 38 | @Override |
| 38 | public void loadOntology(OWLOntology ontology) { | 39 | public void loadOntology(OWLOntology ontology) { |
| 39 | onto = ontology; | 40 | if(isDisposed()) throw new DisposedException(); |
| 40 | } | 41 | onto = ontology; |
| 41 | 42 | } | |
| 42 | @Override | 43 | |
| 43 | public boolean preprocess() { | 44 | @Override |
| 44 | OWLOntology tbox = onto; | 45 | public boolean preprocess() { |
| 45 | try { | 46 | if(isDisposed()) throw new DisposedException(); |
| 46 | onto = OWLHelper.getImportedOntology(tbox, importedData.toString().split(ImportDataFileSeparator)); | 47 | OWLOntology tbox = onto; |
| 47 | importedOntologyPath = OWLHelper.getOntologyPath(onto); | 48 | try { |
| 48 | } catch(OWLOntologyCreationException | OWLOntologyStorageException | IOException e) { | 49 | onto = OWLHelper.getImportedOntology(tbox, importedData.toString().split(ImportDataFileSeparator)); |
| 49 | e.printStackTrace(); | 50 | importedOntologyPath = OWLHelper.getOntologyPath(onto); |
| 50 | } | 51 | } catch(OWLOntologyCreationException | OWLOntologyStorageException | IOException e) { |
| 51 | 52 | e.printStackTrace(); | |
| 52 | DatalogProgram datalogProgram = new DatalogProgram(tbox, false); | 53 | } |
| 53 | importData(datalogProgram.getAdditionalDataFile()); | 54 | |
| 54 | upperStore = new MultiStageQueryEngine("rl-upper", false); | 55 | DatalogProgram datalogProgram = new DatalogProgram(tbox, false); |
| 55 | upperStore.importRDFData("data", importedData.toString()); | 56 | importData(datalogProgram.getAdditionalDataFile()); |
| 56 | GapByStore4ID gap = new GapByStore4ID(upperStore); | 57 | upperStore = new MultiStageQueryEngine("rl-upper", false); |
| 57 | upperStore.materialiseFoldedly(datalogProgram, gap); | 58 | upperStore.importRDFData("data", importedData.toString()); |
| 58 | gap.clear(); | 59 | GapByStore4ID gap = new GapByStore4ID(upperStore); |
| 59 | 60 | upperStore.materialiseFoldedly(datalogProgram, gap); | |
| 60 | factory = onto.getOWLOntologyManager().getOWLDataFactory(); | 61 | gap.clear(); |
| 61 | roller = new QueryRoller(factory); | 62 | |
| 62 | 63 | factory = onto.getOWLOntologyManager().getOWLDataFactory(); | |
| 63 | hermit = new Reasoner(onto); | 64 | roller = new QueryRoller(factory); |
| 64 | return isConsistent(); | 65 | |
| 65 | } | 66 | hermit = new Reasoner(onto); |
| 66 | 67 | return isConsistent(); | |
| 67 | @Override | 68 | } |
| 68 | public boolean isConsistent() { | 69 | |
| 69 | if (toCheckSatisfiability) | 70 | @Override |
| 70 | return hermit.isConsistent(); | 71 | public boolean isConsistent() { |
| 71 | return true; | 72 | if(isDisposed()) throw new DisposedException(); |
| 72 | } | 73 | if(toCheckSatisfiability) |
| 73 | 74 | return hermit.isConsistent(); | |
| 74 | @Override | 75 | return true; |
| 75 | public void evaluate(QueryRecord record) { | 76 | } |
| 76 | String[] disVars = record.getDistinguishedVariables(); | 77 | |
| 77 | Set<OWLNamedIndividual> individuals = onto.getIndividualsInSignature(true); | 78 | @Override |
| 78 | if (disVars.length == 1) { | 79 | public void evaluate(QueryRecord record) { |
| 79 | OWLClassExpression clsExp = roller.rollUp(record.getClause(), record.getAnswerVariables()[0]); | 80 | if(isDisposed()) throw new DisposedException(); |
| 80 | Set<AnswerTuple> answers = new HashSet<AnswerTuple>(); | 81 | String[] disVars = record.getDistinguishedVariables(); |
| 81 | for (OWLNamedIndividual individual: individuals) { | 82 | Set<OWLNamedIndividual> individuals = onto.getIndividualsInSignature(true); |
| 82 | Utility.logDebug("checking ... " + individual); | 83 | if(disVars.length == 1) { |
| 83 | if (hermit.isEntailed(factory.getOWLClassAssertionAxiom(clsExp, individual))) { | 84 | OWLClassExpression clsExp = roller.rollUp(record.getClause(), record.getAnswerVariables()[0]); |
| 84 | answers.add(new AnswerTuple(new Individual[] {Individual.create(individual.toStringID())})); | 85 | Set<AnswerTuple> answers = new HashSet<AnswerTuple>(); |
| 85 | } | 86 | for(OWLNamedIndividual individual : individuals) { |
| 86 | } | 87 | Utility.logDebug("checking ... " + individual); |
| 87 | record.updateLowerBoundAnswers(new AnswerTuplesImp(record.getAnswerVariables(), answers)); | 88 | if(hermit.isEntailed(factory.getOWLClassAssertionAxiom(clsExp, individual))) { |
| 88 | record.markAsProcessed(); | 89 | answers.add(new AnswerTuple(new Individual[]{Individual.create(individual.toStringID())})); |
| 89 | } | 90 | } |
| 90 | else { | 91 | } |
| 91 | // FIXME join here | 92 | record.updateLowerBoundAnswers(new AnswerTuplesImp(record.getAnswerVariables(), answers)); |
| 92 | record.markAsProcessed(); | 93 | record.markAsProcessed(); |
| 93 | } | 94 | } |
| 94 | } | 95 | else { |
| 95 | 96 | // FIXME join here | |
| 96 | @Override | 97 | record.markAsProcessed(); |
| 97 | public void evaluateUpper(QueryRecord record) { | 98 | } |
| 98 | AnswerTuples rlAnswer = null; | 99 | } |
| 99 | try { | 100 | |
| 100 | rlAnswer = upperStore.evaluate(record.getQueryText(), record.getAnswerVariables()); | 101 | @Override |
| 101 | record.updateUpperBoundAnswers(rlAnswer, true); | 102 | public void evaluateUpper(QueryRecord record) { |
| 102 | } finally { | 103 | if(isDisposed()) throw new DisposedException(); |
| 103 | if (rlAnswer != null) rlAnswer.dispose(); | 104 | AnswerTuples rlAnswer = null; |
| 104 | } | 105 | try { |
| 105 | } | 106 | rlAnswer = upperStore.evaluate(record.getQueryText(), record.getAnswerVariables()); |
| 106 | 107 | record.updateUpperBoundAnswers(rlAnswer, true); | |
| 107 | @Override | 108 | } finally { |
| 108 | public void dispose() { | 109 | if(rlAnswer != null) rlAnswer.dispose(); |
| 109 | if (importedOntologyPath != null) { | 110 | } |
| 110 | File tmp = new File(importedOntologyPath); | 111 | } |
| 111 | if (tmp.exists()) tmp.delete(); | 112 | |
| 112 | } | 113 | @Override |
| 113 | super.dispose(); | 114 | public void dispose() { |
| 114 | } | 115 | super.dispose(); |
| 116 | if(importedOntologyPath != null) { | ||
| 117 | File tmp = new File(importedOntologyPath); | ||
| 118 | if(tmp.exists()) tmp.delete(); | ||
| 119 | } | ||
| 120 | } | ||
| 115 | 121 | ||
| 116 | } | 122 | } |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java index 618fb70..8445713 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java | |||
| @@ -21,362 +21,364 @@ import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderDisjVar1; | |||
| 21 | import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderWithGap; | 21 | import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderWithGap; |
| 22 | import uk.ac.ox.cs.pagoda.util.Timer; | 22 | import uk.ac.ox.cs.pagoda.util.Timer; |
| 23 | import uk.ac.ox.cs.pagoda.util.Utility; | 23 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 24 | import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; | ||
| 24 | import uk.ac.ox.cs.pagoda.util.tuples.Tuple; | 25 | import uk.ac.ox.cs.pagoda.util.tuples.Tuple; |
| 25 | 26 | ||
| 26 | import java.util.Collection; | 27 | import java.util.Collection; |
| 27 | 28 | ||
| 28 | class MyQueryReasoner extends QueryReasoner { | 29 | class MyQueryReasoner extends QueryReasoner { |
| 29 | 30 | ||
| 30 | OWLOntology ontology; | 31 | OWLOntology ontology; |
| 31 | DatalogProgram program; | 32 | DatalogProgram program; |
| 32 | 33 | ||
| 33 | // String additonalDataFile; | 34 | // String additonalDataFile; |
| 34 | BasicQueryEngine rlLowerStore = null; | 35 | BasicQueryEngine rlLowerStore = null; |
| 35 | BasicQueryEngine lazyUpperStore = null; | 36 | BasicQueryEngine lazyUpperStore = null; |
| 36 | // BasicQueryEngine limitedSkolemUpperStore; | 37 | MultiStageQueryEngine limitedSkolemUpperStore; |
| 37 | OWLOntology elho_ontology; | 38 | OWLOntology elho_ontology; |
| 38 | // boolean[] namedIndividuals_lazyUpper; | 39 | // boolean[] namedIndividuals_lazyUpper; |
| 39 | KarmaQueryEngine elLowerStore = null; | 40 | KarmaQueryEngine elLowerStore = null; |
| 40 | BasicQueryEngine trackingStore = null; | 41 | BasicQueryEngine trackingStore = null; |
| 41 | // boolean[] namedIndividuals_tracking; | 42 | // boolean[] namedIndividuals_tracking; |
| 42 | TrackingRuleEncoder encoder; | 43 | TrackingRuleEncoder encoder; |
| 43 | private boolean equalityTag; | 44 | private boolean equalityTag; |
| 44 | private boolean multiStageTag; | 45 | private boolean multiStageTag; |
| 45 | private Timer t = new Timer(); | 46 | private Timer t = new Timer(); |
| 46 | private Collection<String> predicatesWithGap = null; | 47 | private Collection<String> predicatesWithGap = null; |
| 47 | private SatisfiabilityStatus satisfiable; | 48 | private SatisfiabilityStatus satisfiable; |
| 48 | private ConsistencyManager consistency = new ConsistencyManager(this); | 49 | private ConsistencyManager consistency = new ConsistencyManager(this); |
| 49 | private boolean useUpperStores = false; | 50 | private boolean useUpperStores = false; |
| 50 | public MyQueryReasoner() { | 51 | |
| 51 | setup(true, true); | 52 | public MyQueryReasoner() { |
| 52 | } | 53 | setup(true, true); |
| 53 | public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) { | 54 | } |
| 54 | setup(multiStageTag, considerEqualities); | 55 | |
| 55 | } | 56 | public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) { |
| 56 | 57 | setup(multiStageTag, considerEqualities); | |
| 57 | private BasicQueryEngine getUpperStore(String name, boolean checkValidity) { | 58 | } |
| 58 | if (multiStageTag) | 59 | |
| 59 | return new MultiStageQueryEngine(name, checkValidity); | 60 | public void setup(boolean multiStageTag, boolean considerEqualities) { |
| 60 | // return new TwoStageQueryEngine(name, checkValidity); | 61 | if(isDisposed()) throw new DisposedException(); |
| 61 | else | 62 | satisfiable = SatisfiabilityStatus.UNCHECKED; |
| 62 | return new BasicQueryEngine(name); | 63 | this.multiStageTag = multiStageTag; |
| 63 | } | 64 | this.equalityTag = considerEqualities; |
| 64 | 65 | ||
| 65 | public void setup(boolean multiStageTag, boolean considerEqualities) { | 66 | rlLowerStore = new BasicQueryEngine("rl-lower-bound"); |
| 66 | satisfiable = SatisfiabilityStatus.UNCHECKED; | 67 | elLowerStore = new KarmaQueryEngine("elho-lower-bound"); |
| 67 | this.multiStageTag = multiStageTag; | 68 | |
| 68 | this.equalityTag = considerEqualities; | 69 | trackingStore = getUpperStore("tracking", false); |
| 69 | 70 | } | |
| 70 | rlLowerStore = new BasicQueryEngine("rl-lower-bound"); | 71 | |
| 71 | elLowerStore = new KarmaQueryEngine("elho-lower-bound"); | 72 | @Override |
| 72 | 73 | public void loadOntology(OWLOntology o) { | |
| 73 | trackingStore = getUpperStore("tracking", false); | 74 | if(isDisposed()) throw new DisposedException(); |
| 74 | } | 75 | if(!equalityTag) { |
| 75 | 76 | EqualitiesEliminator eliminator = new EqualitiesEliminator(o); | |
| 76 | protected void internal_importDataFile(String name, String datafile) { | 77 | o = eliminator.getOutputOntology(); |
| 77 | // addDataFile(datafile); | 78 | eliminator.save(); |
| 78 | rlLowerStore.importRDFData(name, datafile); | 79 | } |
| 79 | if (lazyUpperStore != null) | 80 | |
| 80 | lazyUpperStore.importRDFData(name, datafile); | 81 | ontology = o; |
| 81 | elLowerStore.importRDFData(name, datafile); | 82 | program = new DatalogProgram(ontology, properties.getToClassify()); |
| 82 | trackingStore.importRDFData(name, datafile); | ||
| 83 | } | ||
| 84 | |||
| 85 | @Override | ||
| 86 | public void loadOntology(OWLOntology o) { | ||
| 87 | if(!equalityTag) { | ||
| 88 | EqualitiesEliminator eliminator = new EqualitiesEliminator(o); | ||
| 89 | o = eliminator.getOutputOntology(); | ||
| 90 | eliminator.save(); | ||
| 91 | } | ||
| 92 | |||
| 93 | ontology = o; | ||
| 94 | program = new DatalogProgram(ontology, properties.getToClassify()); | ||
| 95 | // program.getLower().save(); | 83 | // program.getLower().save(); |
| 96 | // program.getUpper().save(); | 84 | // program.getUpper().save(); |
| 97 | // program.getGeneral().save(); | 85 | // program.getGeneral().save(); |
| 98 | 86 | ||
| 99 | useUpperStores = multiStageTag && !program.getGeneral().isHorn(); | 87 | useUpperStores = multiStageTag && !program.getGeneral().isHorn(); |
| 100 | if(useUpperStores) { | 88 | if(useUpperStores) { |
| 101 | lazyUpperStore = getUpperStore("lazy-upper-bound", true); | 89 | lazyUpperStore = getUpperStore("lazy-upper-bound", true); |
| 102 | // limitedSkolemUpperStore = getUpperStore("limited-skolem-upper-bound", true); | 90 | limitedSkolemUpperStore = new MultiStageQueryEngine("limited-skolem-upper-bound", true); |
| 103 | } | 91 | } |
| 104 | 92 | ||
| 105 | importData(program.getAdditionalDataFile()); | 93 | importData(program.getAdditionalDataFile()); |
| 106 | 94 | ||
| 107 | elho_ontology = new ELHOProfile().getFragment(ontology); | 95 | elho_ontology = new ELHOProfile().getFragment(ontology); |
| 108 | elLowerStore.processOntology(elho_ontology); | 96 | elLowerStore.processOntology(elho_ontology); |
| 109 | } | 97 | } |
| 110 | 98 | ||
| 111 | public Collection<String> getPredicatesWithGap() { | 99 | public Collection<String> getPredicatesWithGap() { |
| 112 | return predicatesWithGap; | 100 | if(isDisposed()) throw new DisposedException(); |
| 113 | } | 101 | return predicatesWithGap; |
| 102 | } | ||
| 114 | 103 | ||
| 115 | @Override | 104 | @Override |
| 116 | public boolean preprocess() { | 105 | public boolean preprocess() { |
| 117 | t.reset(); | 106 | if(isDisposed()) throw new DisposedException(); |
| 118 | Utility.logInfo("Preprocessing... checking satisfiability... "); | 107 | t.reset(); |
| 108 | Utility.logInfo("Preprocessing... checking satisfiability... "); | ||
| 119 | 109 | ||
| 120 | String name = "data", datafile = importedData.toString(); | 110 | String name = "data", datafile = importedData.toString(); |
| 121 | rlLowerStore.importRDFData(name, datafile); | 111 | rlLowerStore.importRDFData(name, datafile); |
| 122 | rlLowerStore.materialise("lower program", program.getLower().toString()); | 112 | rlLowerStore.materialise("lower program", program.getLower().toString()); |
| 123 | // program.getLower().save(); | 113 | // program.getLower().save(); |
| 124 | if(!consistency.checkRLLowerBound()) return false; | 114 | if(!consistency.checkRLLowerBound()) return false; |
| 125 | Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber()); | 115 | Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber()); |
| 126 | 116 | ||
| 127 | String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); | 117 | String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); |
| 128 | 118 | ||
| 129 | elLowerStore.importRDFData(name, datafile); | 119 | elLowerStore.importRDFData(name, datafile); |
| 130 | elLowerStore.materialise("saturate named individuals", originalMarkProgram); | 120 | elLowerStore.materialise("saturate named individuals", originalMarkProgram); |
| 131 | elLowerStore.materialise("lower program", program.getLower().toString()); | 121 | elLowerStore.materialise("lower program", program.getLower().toString()); |
| 132 | elLowerStore.initialiseKarma(); | 122 | elLowerStore.initialiseKarma(); |
| 133 | if(!consistency.checkELLowerBound()) return false; | 123 | if(!consistency.checkELLowerBound()) return false; |
| 134 | 124 | ||
| 135 | if(lazyUpperStore != null) { | 125 | if(lazyUpperStore != null) { |
| 136 | lazyUpperStore.importRDFData(name, datafile); | 126 | lazyUpperStore.importRDFData(name, datafile); |
| 137 | lazyUpperStore.materialise("saturate named individuals", originalMarkProgram); | 127 | lazyUpperStore.materialise("saturate named individuals", originalMarkProgram); |
| 138 | int tag = lazyUpperStore.materialiseRestrictedly(program, null); | 128 | int tag = lazyUpperStore.materialiseRestrictedly(program, null); |
| 139 | if(tag != 1) { | 129 | if(tag != 1) { |
| 140 | lazyUpperStore.dispose(); | 130 | lazyUpperStore.dispose(); |
| 141 | lazyUpperStore = null; | 131 | lazyUpperStore = null; |
| 142 | } | 132 | } |
| 143 | if(tag == -1) return false; | 133 | if(tag == -1) return false; |
| 144 | } | 134 | } |
| 145 | if(consistency.checkUpper(lazyUpperStore)) { | 135 | if(consistency.checkUpper(lazyUpperStore)) { |
| 146 | satisfiable = SatisfiabilityStatus.SATISFIABLE; | 136 | satisfiable = SatisfiabilityStatus.SATISFIABLE; |
| 147 | Utility.logInfo("time for satisfiability checking: " + t.duration()); | 137 | Utility.logInfo("time for satisfiability checking: " + t.duration()); |
| 148 | } | 138 | } |
| 149 | 139 | ||
| 150 | // if(limitedSkolemUpperStore != null) { | 140 | if(limitedSkolemUpperStore != null) { |
| 151 | // limitedSkolemUpperStore.importRDFData(name, datafile); | 141 | limitedSkolemUpperStore.importRDFData(name, datafile); |
| 152 | // limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram); | 142 | limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram); |
| 153 | // int tag = limitedSkolemUpperStore.materialiseSkolemly(program, null); | 143 | int tag = limitedSkolemUpperStore.materialiseSkolemly(program, null); |
| 154 | // if(tag != 1) { | 144 | if(tag != 1) { |
| 155 | // limitedSkolemUpperStore.dispose(); | 145 | limitedSkolemUpperStore.dispose(); |
| 156 | // limitedSkolemUpperStore = null; | 146 | limitedSkolemUpperStore = null; |
| 157 | // } | 147 | } |
| 158 | // if(tag == -1) return false; | 148 | if(tag == -1) return false; |
| 159 | // } | 149 | } |
| 160 | // if(satisfiable == SatisfiabilityStatus.UNCHECKED && consistency.checkUpper(limitedSkolemUpperStore)) { | 150 | if(satisfiable == SatisfiabilityStatus.UNCHECKED && consistency.checkUpper(limitedSkolemUpperStore)) { |
| 161 | // satisfiable = SatisfiabilityStatus.SATISFIABLE; | 151 | satisfiable = SatisfiabilityStatus.SATISFIABLE; |
| 162 | // Utility.logInfo("time for satisfiability checking: " + t.duration()); | 152 | Utility.logInfo("time for satisfiability checking: " + t.duration()); |
| 163 | // } | 153 | } |
| 164 | 154 | ||
| 165 | trackingStore.importRDFData(name, datafile); | 155 | trackingStore.importRDFData(name, datafile); |
| 166 | trackingStore.materialise("saturate named individuals", originalMarkProgram); | 156 | trackingStore.materialise("saturate named individuals", originalMarkProgram); |
| 167 | 157 | ||
| 168 | // materialiseFullUpper(); | 158 | // materialiseFullUpper(); |
| 169 | // GapByStore4ID gap = new GapByStore4ID(trackingStore); | 159 | // GapByStore4ID gap = new GapByStore4ID(trackingStore); |
| 170 | GapByStore4ID gap = new GapByStore4ID2(trackingStore, rlLowerStore); | 160 | GapByStore4ID gap = new GapByStore4ID2(trackingStore, rlLowerStore); |
| 171 | trackingStore.materialiseFoldedly(program, gap); | 161 | trackingStore.materialiseFoldedly(program, gap); |
| 172 | predicatesWithGap = gap.getPredicatesWithGap(); | 162 | predicatesWithGap = gap.getPredicatesWithGap(); |
| 173 | gap.clear(); | 163 | gap.clear(); |
| 174 | 164 | ||
| 175 | if(program.getGeneral().isHorn()) | 165 | if(program.getGeneral().isHorn()) |
| 176 | encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore); | 166 | encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore); |
| 177 | else | 167 | else |
| 178 | encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore); | 168 | encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore); |
| 179 | // encoder = new TrackingRuleEncoderDisj1(program.getUpper(), trackingStore); | 169 | // encoder = new TrackingRuleEncoderDisj1(program.getUpper(), trackingStore); |
| 180 | // encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore); | 170 | // encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore); |
| 181 | // encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore); | 171 | // encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore); |
| 182 | 172 | ||
| 183 | program.deleteABoxTurtleFile(); | 173 | if(!isConsistent()) |
| 184 | 174 | return false; | |
| 185 | if(!isConsistent()) | 175 | |
| 186 | return false; | 176 | consistency.extractBottomFragment(); |
| 187 | 177 | consistency.dispose(); | |
| 188 | consistency.extractBottomFragment(); | 178 | |
| 189 | consistency.dispose(); | 179 | program.dispose(); |
| 190 | 180 | ||
| 191 | return true; | 181 | return true; |
| 192 | } | 182 | } |
| 193 | 183 | ||
| 194 | @Override | 184 | @Override |
| 195 | public boolean isConsistent() { | 185 | public boolean isConsistent() { |
| 196 | if(satisfiable == SatisfiabilityStatus.UNCHECKED) { | 186 | if(isDisposed()) throw new DisposedException(); |
| 197 | satisfiable = consistency.check() ? SatisfiabilityStatus.SATISFIABLE : SatisfiabilityStatus.UNSATISFIABLE; | 187 | if(satisfiable == SatisfiabilityStatus.UNCHECKED) { |
| 198 | Utility.logInfo("time for satisfiability checking: " + t.duration()); | 188 | satisfiable = consistency.check() ? SatisfiabilityStatus.SATISFIABLE : SatisfiabilityStatus.UNSATISFIABLE; |
| 199 | } | 189 | Utility.logInfo("time for satisfiability checking: " + t.duration()); |
| 200 | return satisfiable == SatisfiabilityStatus.SATISFIABLE; | 190 | } |
| 201 | } | 191 | return satisfiable == SatisfiabilityStatus.SATISFIABLE; |
| 202 | 192 | } | |
| 203 | /** | 193 | |
| 204 | * It deals with blanks nodes differently from variables | 194 | @Override |
| 205 | * according to SPARQL semantics for OWL2 Entailment Regime. | 195 | public void evaluate(QueryRecord queryRecord) { |
| 206 | * <p> | 196 | if(isDisposed()) throw new DisposedException(); |
| 207 | * In particular variables are matched only against named individuals, | 197 | if(queryBounds(queryRecord)) |
| 208 | * and blank nodes against named and anonymous individuals. | 198 | return; |
| 209 | */ | 199 | |
| 210 | private boolean queryUpperStore(BasicQueryEngine upperStore, QueryRecord queryRecord, | 200 | OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord); |
| 211 | Tuple<String> extendedQuery, Step step) { | 201 | |
| 212 | 202 | int aBoxCount = relevantOntologySubset.getABoxAxioms(true).size(); | |
| 213 | if(queryRecord.hasNonAnsDistinguishedVariables()) | 203 | Utility.logInfo("Relevant ontology subset: ABox_axioms=" + aBoxCount + " TBox_axioms=" + (relevantOntologySubset |
| 214 | queryUpperBound(upperStore, queryRecord, extendedQuery.get(0), queryRecord.getAnswerVariables()); | 204 | .getAxiomCount() - aBoxCount)); |
| 215 | else | 205 | // queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); |
| 216 | queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | 206 | |
| 217 | 207 | if(querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) | |
| 218 | queryRecord.addProcessingTime(step, t.duration()); | 208 | return; |
| 219 | if(queryRecord.isProcessed()) { | 209 | |
| 220 | queryRecord.setDifficulty(step); | 210 | Timer t = new Timer(); |
| 221 | return true; | 211 | Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT()); |
| 222 | } | 212 | summarisedChecker.check(queryRecord.getGapAnswers()); |
| 223 | return false; | 213 | summarisedChecker.dispose(); |
| 224 | } | 214 | Utility.logDebug("Total time for full reasoner: " + t.duration()); |
| 225 | 215 | queryRecord.markAsProcessed(); | |
| 226 | /** | 216 | Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty()); |
| 227 | * Returns the part of the ontology relevant for Hermit, while computing the bound answers. | 217 | } |
| 228 | * */ | 218 | |
| 229 | private boolean queryBounds(QueryRecord queryRecord) { | 219 | @Override |
| 230 | AnswerTuples rlAnswer = null, elAnswer = null; | 220 | public void evaluateUpper(QueryRecord queryRecord) { |
| 231 | 221 | if(isDisposed()) throw new DisposedException(); | |
| 232 | t.reset(); | 222 | // TODO add new upper store |
| 233 | try { | 223 | AnswerTuples rlAnswer = null; |
| 234 | rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | 224 | boolean useFull = queryRecord.isBottom() || lazyUpperStore == null; |
| 235 | Utility.logDebug(t.duration()); | 225 | try { |
| 236 | queryRecord.updateLowerBoundAnswers(rlAnswer); | 226 | rlAnswer = |
| 237 | } finally { | 227 | (useFull ? trackingStore : lazyUpperStore).evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); |
| 238 | if (rlAnswer != null) rlAnswer.dispose(); | 228 | queryRecord.updateUpperBoundAnswers(rlAnswer, true); |
| 239 | } | 229 | } finally { |
| 240 | queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration()); | 230 | if(rlAnswer != null) rlAnswer.dispose(); |
| 241 | 231 | } | |
| 242 | t.reset(); | 232 | } |
| 243 | 233 | ||
| 244 | Tuple<String> extendedQueryTexts = queryRecord.getExtendedQueryText(); | 234 | @Override |
| 245 | 235 | public void dispose() { | |
| 246 | Utility.logDebug("Tracking store"); | 236 | super.dispose(); |
| 247 | if(queryUpperStore(trackingStore, queryRecord, extendedQueryTexts, Step.SIMPLE_UPPER_BOUND)) | 237 | |
| 248 | return true; | 238 | if(encoder != null) encoder.dispose(); |
| 249 | 239 | if(rlLowerStore != null) rlLowerStore.dispose(); | |
| 250 | if(!queryRecord.isBottom()) { | 240 | if(lazyUpperStore != null) lazyUpperStore.dispose(); |
| 251 | Utility.logDebug("Lazy store"); | 241 | if(elLowerStore != null) elLowerStore.dispose(); |
| 252 | if(lazyUpperStore != null && queryUpperStore(lazyUpperStore, queryRecord, extendedQueryTexts, Step.LAZY_UPPER_BOUND)) | 242 | if(trackingStore != null) trackingStore.dispose(); |
| 253 | return true; | 243 | if(limitedSkolemUpperStore != null) limitedSkolemUpperStore.dispose(); |
| 244 | |||
| 245 | } | ||
| 246 | |||
| 247 | private BasicQueryEngine getUpperStore(String name, boolean checkValidity) { | ||
| 248 | if(multiStageTag) | ||
| 249 | return new MultiStageQueryEngine(name, checkValidity); | ||
| 250 | // return new TwoStageQueryEngine(name, checkValidity); | ||
| 251 | else | ||
| 252 | return new BasicQueryEngine(name); | ||
| 253 | } | ||
| 254 | |||
| 255 | protected void internal_importDataFile(String name, String datafile) { | ||
| 256 | // addDataFile(datafile); | ||
| 257 | rlLowerStore.importRDFData(name, datafile); | ||
| 258 | if(lazyUpperStore != null) | ||
| 259 | lazyUpperStore.importRDFData(name, datafile); | ||
| 260 | elLowerStore.importRDFData(name, datafile); | ||
| 261 | trackingStore.importRDFData(name, datafile); | ||
| 262 | } | ||
| 263 | |||
| 264 | /** | ||
| 265 | * It deals with blanks nodes differently from variables | ||
| 266 | * according to SPARQL semantics for OWL2 Entailment Regime. | ||
| 267 | * <p> | ||
| 268 | * In particular variables are matched only against named individuals, | ||
| 269 | * and blank nodes against named and anonymous individuals. | ||
| 270 | */ | ||
| 271 | private boolean queryUpperStore(BasicQueryEngine upperStore, QueryRecord queryRecord, | ||
| 272 | Tuple<String> extendedQuery, Step step) { | ||
| 273 | |||
| 274 | if(queryRecord.hasNonAnsDistinguishedVariables()) | ||
| 275 | queryUpperBound(upperStore, queryRecord, extendedQuery.get(0), queryRecord.getAnswerVariables()); | ||
| 276 | else | ||
| 277 | queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 278 | |||
| 279 | queryRecord.addProcessingTime(step, t.duration()); | ||
| 280 | if(queryRecord.isProcessed()) { | ||
| 281 | queryRecord.setDifficulty(step); | ||
| 282 | return true; | ||
| 283 | } | ||
| 284 | return false; | ||
| 285 | } | ||
| 286 | |||
| 287 | /** | ||
| 288 | * Returns the part of the ontology relevant for Hermit, while computing the bound answers. | ||
| 289 | */ | ||
| 290 | private boolean queryBounds(QueryRecord queryRecord) { | ||
| 291 | AnswerTuples rlAnswer = null, elAnswer = null; | ||
| 292 | |||
| 293 | t.reset(); | ||
| 294 | try { | ||
| 295 | rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 296 | Utility.logDebug(t.duration()); | ||
| 297 | queryRecord.updateLowerBoundAnswers(rlAnswer); | ||
| 298 | } finally { | ||
| 299 | if(rlAnswer != null) rlAnswer.dispose(); | ||
| 300 | } | ||
| 301 | queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration()); | ||
| 302 | |||
| 303 | t.reset(); | ||
| 304 | |||
| 305 | Tuple<String> extendedQueryTexts = queryRecord.getExtendedQueryText(); | ||
| 306 | |||
| 307 | Utility.logDebug("Tracking store"); | ||
| 308 | if(queryUpperStore(trackingStore, queryRecord, extendedQueryTexts, Step.SIMPLE_UPPER_BOUND)) | ||
| 309 | return true; | ||
| 310 | |||
| 311 | if(!queryRecord.isBottom()) { | ||
| 312 | Utility.logDebug("Lazy store"); | ||
| 313 | if(lazyUpperStore != null && queryUpperStore(lazyUpperStore, queryRecord, extendedQueryTexts, Step.LAZY_UPPER_BOUND)) | ||
| 314 | return true; | ||
| 254 | // Utility.logDebug("Skolem store"); | 315 | // Utility.logDebug("Skolem store"); |
| 255 | // if(limitedSkolemUpperStore != null && queryUpperStore(limitedSkolemUpperStore, queryRecord, extendedQueryTexts, Step.L_SKOLEM_UPPER_BOUND)) | 316 | // if(limitedSkolemUpperStore != null && queryUpperStore(limitedSkolemUpperStore, queryRecord, extendedQueryTexts, Step.L_SKOLEM_UPPER_BOUND)) |
| 256 | // return null; | 317 | // return null; |
| 257 | } | 318 | } |
| 258 | 319 | ||
| 259 | t.reset(); | 320 | t.reset(); |
| 260 | try { | 321 | try { |
| 261 | elAnswer = elLowerStore.evaluate(extendedQueryTexts.get(0), | 322 | elAnswer = elLowerStore.evaluate(extendedQueryTexts.get(0), |
| 262 | queryRecord.getAnswerVariables(), | 323 | queryRecord.getAnswerVariables(), |
| 263 | queryRecord.getLowerBoundAnswers()); | 324 | queryRecord.getLowerBoundAnswers()); |
| 264 | Utility.logDebug(t.duration()); | 325 | Utility.logDebug(t.duration()); |
| 265 | queryRecord.updateLowerBoundAnswers(elAnswer); | 326 | queryRecord.updateLowerBoundAnswers(elAnswer); |
| 266 | } finally { | 327 | } finally { |
| 267 | if (elAnswer != null) elAnswer.dispose(); | 328 | if(elAnswer != null) elAnswer.dispose(); |
| 268 | } | 329 | } |
| 269 | queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); | 330 | queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); |
| 270 | 331 | ||
| 271 | if(queryRecord.isProcessed()) { | 332 | if(queryRecord.isProcessed()) { |
| 272 | queryRecord.setDifficulty(Step.EL_LOWER_BOUND); | 333 | queryRecord.setDifficulty(Step.EL_LOWER_BOUND); |
| 273 | return true; | 334 | return true; |
| 274 | } | 335 | } |
| 275 | 336 | ||
| 276 | return false; | 337 | return false; |
| 277 | } | 338 | } |
| 278 | 339 | ||
| 279 | private OWLOntology extractRelevantOntologySubset(QueryRecord queryRecord) { | 340 | private OWLOntology extractRelevantOntologySubset(QueryRecord queryRecord) { |
| 280 | t.reset(); | 341 | t.reset(); |
| 281 | 342 | ||
| 282 | QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord); | 343 | QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord); |
| 283 | OWLOntology relevantOntologySubset = tracker.extract(trackingStore, consistency.getQueryRecords(), true); | 344 | OWLOntology relevantOntologySubset = tracker.extract(trackingStore, consistency.getQueryRecords(), true); |
| 284 | 345 | ||
| 285 | queryRecord.addProcessingTime(Step.FRAGMENT, t.duration()); | 346 | queryRecord.addProcessingTime(Step.FRAGMENT, t.duration()); |
| 286 | 347 | ||
| 287 | return relevantOntologySubset; | 348 | return relevantOntologySubset; |
| 288 | } | 349 | } |
| 289 | 350 | ||
| 290 | private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) { | 351 | private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) { |
| 291 | AnswerTuples rlAnswer = null; | 352 | AnswerTuples rlAnswer = null; |
| 292 | try { | 353 | try { |
| 293 | Utility.logDebug(queryText); | 354 | Utility.logDebug(queryText); |
| 294 | rlAnswer = upperStore.evaluate(queryText, answerVariables); | 355 | rlAnswer = upperStore.evaluate(queryText, answerVariables); |
| 295 | Utility.logDebug(t.duration()); | 356 | Utility.logDebug(t.duration()); |
| 296 | queryRecord.updateUpperBoundAnswers(rlAnswer); | 357 | queryRecord.updateUpperBoundAnswers(rlAnswer); |
| 297 | } finally { | 358 | } finally { |
| 298 | if(rlAnswer != null) rlAnswer.dispose(); | 359 | if(rlAnswer != null) rlAnswer.dispose(); |
| 299 | } | 360 | } |
| 300 | } | 361 | } |
| 301 | 362 | ||
| 302 | @Override | 363 | private boolean querySkolemisedRelevantSubset(OWLOntology relevantSubset, QueryRecord queryRecord) { |
| 303 | public void evaluate(QueryRecord queryRecord) { | 364 | DatalogProgram relevantProgram = new DatalogProgram(relevantSubset, false); // toClassify is false |
| 304 | if(queryBounds(queryRecord)) | 365 | |
| 305 | return; | 366 | MultiStageQueryEngine relevantStore = |
| 306 | 367 | new MultiStageQueryEngine("Relevant-store", true); // checkValidity is true | |
| 307 | OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord); | 368 | // relevantStore.importRDFData("data", relevantProgram.getAdditionalDataFile()); // tried, doesn't work |
| 308 | 369 | relevantStore.importDataFromABoxOf(relevantSubset); | |
| 309 | int aBoxCount = relevantOntologySubset.getABoxAxioms(true).size(); | 370 | |
| 310 | Utility.logInfo("Relevant ontology subset: ABox_axioms=" + aBoxCount + " TBox_axioms=" + (relevantOntologySubset | 371 | int materialisationResult = relevantStore.materialiseSkolemly(relevantProgram, null); |
| 311 | .getAxiomCount() - aBoxCount)); | 372 | // int materialisationResult = relevantStore.materialiseRestrictedly(relevantProgram, null); // DOESN'T WORK!!! |
| 312 | // queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); | 373 | if(materialisationResult != 1) |
| 313 | 374 | throw new RuntimeException("Skolemised materialisation error"); // TODO check consistency | |
| 314 | if(querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) | ||
| 315 | return; | ||
| 316 | |||
| 317 | Timer t = new Timer(); | ||
| 318 | Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT()); | ||
| 319 | summarisedChecker.check(queryRecord.getGapAnswers()); | ||
| 320 | summarisedChecker.dispose(); | ||
| 321 | Utility.logDebug("Total time for full reasoner: " + t.duration()); | ||
| 322 | queryRecord.markAsProcessed(); | ||
| 323 | Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty()); | ||
| 324 | } | ||
| 325 | |||
| 326 | private boolean querySkolemisedRelevantSubset(OWLOntology relevantSubset, QueryRecord queryRecord) { | ||
| 327 | MultiStageQueryEngine relevantStore = | ||
| 328 | new MultiStageQueryEngine("Relevant-store", true); // checkValidity is true | ||
| 329 | DatalogProgram relevantProgram = new DatalogProgram(relevantSubset, false); // toClassify is false | ||
| 330 | |||
| 331 | // relevantStore.importRDFData("data", importedData.toString()); // 2 answers more | ||
| 332 | relevantStore.importDataFromABoxOf(relevantSubset); | ||
| 333 | |||
| 334 | int materialisationResult = relevantStore.materialiseSkolemly(relevantProgram, null); | ||
| 335 | if(materialisationResult != 1) | ||
| 336 | throw new RuntimeException("Skolemised materialisation error"); // TODO check consistency | ||
| 337 | // relevantStore.materialiseRestrictedly(relevantProgram, null); // it has been tried | 375 | // relevantStore.materialiseRestrictedly(relevantProgram, null); // it has been tried |
| 338 | 376 | ||
| 339 | return queryUpperStore(relevantStore, queryRecord, queryRecord.getExtendedQueryText(), Step.L_SKOLEM_UPPER_BOUND); | 377 | return queryUpperStore(relevantStore, queryRecord, queryRecord.getExtendedQueryText(), Step.L_SKOLEM_UPPER_BOUND); |
| 340 | 378 | ||
| 341 | // the following has been tried | 379 | // return queryUpperStore(limitedSkolemUpperStore, queryRecord, queryRecord.getExtendedQueryText(), Step.L_SKOLEM_UPPER_BOUND); |
| 342 | // Tuple<String> extendedQueryText = queryRecord.getExtendedQueryText(); | 380 | } |
| 343 | // if(queryRecord.hasNonAnsDistinguishedVariables()) { | 381 | |
| 344 | // queryUpperBound(relevantStore, queryRecord, extendedQueryText.get(0), queryRecord.getAnswerVariables()); | 382 | enum SatisfiabilityStatus {SATISFIABLE, UNSATISFIABLE, UNCHECKED} |
| 345 | // queryUpperBound(relevantStore, queryRecord, extendedQueryText.get(1), queryRecord.getDistinguishedVariables()); | ||
| 346 | // } | ||
| 347 | // else | ||
| 348 | // queryUpperBound(relevantStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 349 | // | ||
| 350 | // return queryRecord.isProcessed(); | ||
| 351 | |||
| 352 | } | ||
| 353 | |||
| 354 | @Override | ||
| 355 | public void evaluateUpper(QueryRecord queryRecord) { | ||
| 356 | // TODO add new upper store | ||
| 357 | AnswerTuples rlAnswer = null; | ||
| 358 | boolean useFull = queryRecord.isBottom() || lazyUpperStore == null; | ||
| 359 | try { | ||
| 360 | rlAnswer = | ||
| 361 | (useFull ? trackingStore : lazyUpperStore).evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 362 | queryRecord.updateUpperBoundAnswers(rlAnswer, true); | ||
| 363 | } finally { | ||
| 364 | if(rlAnswer != null) rlAnswer.dispose(); | ||
| 365 | } | ||
| 366 | } | ||
| 367 | |||
| 368 | @Override | ||
| 369 | public void dispose() { | ||
| 370 | if (encoder != null) encoder.dispose(); | ||
| 371 | if (rlLowerStore != null) rlLowerStore.dispose(); | ||
| 372 | if (lazyUpperStore != null) lazyUpperStore.dispose(); | ||
| 373 | if (elLowerStore != null) elLowerStore.dispose(); | ||
| 374 | if (trackingStore != null) trackingStore.dispose(); | ||
| 375 | |||
| 376 | // if(limitedSkolemUpperStore != null) limitedSkolemUpperStore.dispose(); | ||
| 377 | super.dispose(); | ||
| 378 | } | ||
| 379 | |||
| 380 | enum SatisfiabilityStatus {SATISFIABLE, UNSATISFIABLE, UNCHECKED} | ||
| 381 | 383 | ||
| 382 | } | 384 | } |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java index b9abf07..3200216 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java | |||
| @@ -1,17 +1,15 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | 1 | package uk.ac.ox.cs.pagoda.reasoner; |
| 2 | 2 | ||
| 3 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 4 | import uk.ac.ox.cs.pagoda.util.disposable.Disposable; | ||
| 5 | |||
| 3 | import java.util.Collection; | 6 | import java.util.Collection; |
| 4 | 7 | ||
| 5 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | 8 | public abstract class QueryEngine extends Disposable { |
| 9 | |||
| 10 | public abstract void evaluate(Collection<String> queryTexts, String answerFile); | ||
| 6 | 11 | ||
| 7 | public interface QueryEngine { | 12 | public abstract AnswerTuples evaluate(String queryText); |
| 8 | 13 | ||
| 9 | public void evaluate(Collection<String> queryTexts, String answerFile); | 14 | public abstract AnswerTuples evaluate(String queryText, String[] answerVariables); |
| 10 | |||
| 11 | public AnswerTuples evaluate(String queryText); | ||
| 12 | |||
| 13 | public AnswerTuples evaluate(String queryText, String[] answerVariables); | ||
| 14 | |||
| 15 | public void dispose(); | ||
| 16 | |||
| 17 | } | 15 | } |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java index 118c1b2..962a78f 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java | |||
| @@ -9,6 +9,8 @@ import uk.ac.ox.cs.pagoda.query.QueryRecord; | |||
| 9 | import uk.ac.ox.cs.pagoda.util.PagodaProperties; | 9 | import uk.ac.ox.cs.pagoda.util.PagodaProperties; |
| 10 | import uk.ac.ox.cs.pagoda.util.Timer; | 10 | import uk.ac.ox.cs.pagoda.util.Timer; |
| 11 | import uk.ac.ox.cs.pagoda.util.Utility; | 11 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 12 | import uk.ac.ox.cs.pagoda.util.disposable.Disposable; | ||
| 13 | import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; | ||
| 12 | 14 | ||
| 13 | import java.io.BufferedWriter; | 15 | import java.io.BufferedWriter; |
| 14 | import java.io.File; | 16 | import java.io.File; |
| @@ -18,230 +20,242 @@ import java.nio.file.Paths; | |||
| 18 | import java.util.Collection; | 20 | import java.util.Collection; |
| 19 | 21 | ||
| 20 | // TODO clean APIs | 22 | // TODO clean APIs |
| 21 | public abstract class QueryReasoner { | 23 | public abstract class QueryReasoner extends Disposable { |
| 22 | 24 | ||
| 23 | public static final String ImportDataFileSeparator = ";"; | 25 | public static final String ImportDataFileSeparator = ";"; |
| 24 | private static final boolean DEFAULT_MULTI_STAGES = true; | 26 | private static final boolean DEFAULT_MULTI_STAGES = true; |
| 25 | private static final boolean DEFAULT_EQUALITIES = true; | 27 | private static final boolean DEFAULT_EQUALITIES = true; |
| 26 | public boolean fullReasoner = this instanceof MyQueryReasoner; | 28 | public boolean fullReasoner = this instanceof MyQueryReasoner; |
| 27 | protected StringBuilder importedData = new StringBuilder(); | 29 | protected StringBuilder importedData = new StringBuilder(); |
| 28 | // protected boolean forSemFacet = false; | 30 | // protected boolean forSemFacet = false; |
| 29 | PagodaProperties properties; | 31 | PagodaProperties properties; |
| 30 | BufferedWriter answerWriter = null; | 32 | BufferedWriter answerWriter = null; |
| 31 | private QueryManager m_queryManager = new QueryManager(); | 33 | private QueryManager m_queryManager = new QueryManager(); |
| 32 | 34 | ||
| 33 | public static QueryReasoner getInstance(PagodaProperties p) { | 35 | public static QueryReasoner getInstance(PagodaProperties p) { |
| 34 | OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath()); | 36 | OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath()); |
| 35 | QueryReasoner pagoda = getInstance(ontology, p); | 37 | QueryReasoner pagoda = getInstance(ontology, p); |
| 36 | pagoda.properties = p; | 38 | pagoda.properties = p; |
| 37 | pagoda.loadOntology(ontology); | 39 | pagoda.loadOntology(ontology); |
| 38 | pagoda.importData(p.getDataPath()); | 40 | pagoda.importData(p.getDataPath()); |
| 39 | if (pagoda.preprocess()) { | 41 | if(pagoda.preprocess()) { |
| 40 | Utility.logInfo("The ontology is consistent!"); | 42 | Utility.logInfo("The ontology is consistent!"); |
| 41 | return pagoda; | 43 | return pagoda; |
| 42 | } | 44 | } |
| 43 | else { | 45 | else { |
| 44 | System.out.println("The ontology is inconsistent!"); | 46 | System.out.println("The ontology is inconsistent!"); |
| 45 | pagoda.dispose(); | 47 | pagoda.dispose(); |
| 46 | return null; | 48 | return null; |
| 47 | } | 49 | } |
| 48 | } | 50 | } |
| 49 | 51 | ||
| 50 | public static QueryReasoner getInstance(OWLOntology o) { | 52 | public static QueryReasoner getInstance(OWLOntology o) { |
| 51 | QueryReasoner pagoda = getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES); | 53 | QueryReasoner pagoda = getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES); |
| 52 | pagoda.properties = new PagodaProperties(); | 54 | pagoda.properties = new PagodaProperties(); |
| 53 | return pagoda; | 55 | return pagoda; |
| 54 | } | 56 | } |
| 55 | 57 | ||
| 56 | private static QueryReasoner getInstance(OWLOntology o, PagodaProperties p) { | 58 | private static QueryReasoner getInstance(OWLOntology o, PagodaProperties p) { |
| 57 | return getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES); | 59 | return getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES); |
| 58 | } | 60 | } |
| 59 | 61 | ||
| 60 | public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) { | 62 | public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) { |
| 61 | // Utility.initialise(); | 63 | // Utility.initialise(); |
| 62 | QueryReasoner reasoner; | 64 | QueryReasoner reasoner; |
| 63 | if (OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner(); | 65 | if(OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner(); |
| 64 | else if (OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner(); | 66 | else if(OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner(); |
| 65 | else | 67 | else |
| 66 | switch (type) { | 68 | switch(type) { |
| 67 | case RLU: | 69 | case RLU: |
| 68 | reasoner = new RLUQueryReasoner(performMultiStages, considerEqualities); | 70 | reasoner = new RLUQueryReasoner(performMultiStages, considerEqualities); |
| 69 | break; | 71 | break; |
| 70 | case ELHOU: | 72 | case ELHOU: |
| 71 | reasoner = new ELHOUQueryReasoner(performMultiStages, considerEqualities); | 73 | reasoner = new ELHOUQueryReasoner(performMultiStages, considerEqualities); |
| 72 | break; | 74 | break; |
| 73 | default: | 75 | default: |
| 74 | reasoner = new MyQueryReasoner(performMultiStages, considerEqualities); | 76 | reasoner = new MyQueryReasoner(performMultiStages, considerEqualities); |
| 75 | } | 77 | } |
| 76 | return reasoner; | 78 | return reasoner; |
| 77 | } | 79 | } |
| 78 | 80 | ||
| 79 | public static QueryReasoner getHermiTReasoner(boolean toCheckSatisfiability) { | 81 | public static QueryReasoner getHermiTReasoner(boolean toCheckSatisfiability) { |
| 80 | return new HermiTReasoner(toCheckSatisfiability); | 82 | return new HermiTReasoner(toCheckSatisfiability); |
| 81 | } | 83 | } |
| 82 | 84 | ||
| 83 | public void setToClassify(boolean flag) { | 85 | public void setToClassify(boolean flag) { |
| 84 | properties.setToClassify(flag); | 86 | if(isDisposed()) throw new DisposedException(); |
| 85 | } | 87 | properties.setToClassify(flag); |
| 86 | 88 | } | |
| 87 | public void setToCallHermiT(boolean flag) { | 89 | |
| 88 | properties.setToCallHermiT(flag); | 90 | public void setToCallHermiT(boolean flag) { |
| 89 | } | 91 | if(isDisposed()) throw new DisposedException(); |
| 90 | 92 | properties.setToCallHermiT(flag); | |
| 91 | public void importData(String datafile) { | 93 | } |
| 92 | if (datafile != null && !datafile.equalsIgnoreCase("null")) | 94 | |
| 93 | importData(datafile.split(ImportDataFileSeparator)); | 95 | public void importData(String datafile) { |
| 94 | } | 96 | if(isDisposed()) throw new DisposedException(); |
| 95 | 97 | if(datafile != null && !datafile.equalsIgnoreCase("null")) | |
| 96 | public void importData(String[] datafiles) { | 98 | importData(datafile.split(ImportDataFileSeparator)); |
| 97 | if (datafiles != null) { | 99 | } |
| 98 | for (String datafile: datafiles) { | 100 | |
| 99 | File file = new File(datafile); | 101 | public void importData(String[] datafiles) { |
| 100 | if (file.exists()) { | 102 | if(isDisposed()) throw new DisposedException(); |
| 101 | if (file.isFile()) importDataFile(file); | 103 | if(datafiles != null) { |
| 102 | else importDataDirectory(file); | 104 | for(String datafile : datafiles) { |
| 103 | } | 105 | File file = new File(datafile); |
| 104 | else { | 106 | if(file.exists()) { |
| 105 | Utility.logError("warning: file " + datafile + " doesn't exists."); | 107 | if(file.isFile()) importDataFile(file); |
| 106 | } | 108 | else importDataDirectory(file); |
| 107 | } | 109 | } |
| 108 | } | 110 | else { |
| 109 | } | 111 | Utility.logError("warning: file " + datafile + " doesn't exists."); |
| 110 | 112 | } | |
| 111 | private void importDataDirectory(File file) { | 113 | } |
| 112 | for (File child: file.listFiles()) | 114 | } |
| 113 | if (child.isFile()) importDataFile(child); | 115 | } |
| 114 | else importDataDirectory(child); | 116 | |
| 115 | } | 117 | public abstract void loadOntology(OWLOntology ontology); |
| 116 | 118 | ||
| 117 | private void importDataFile(File file) { | 119 | public abstract boolean preprocess(); |
| 118 | String datafile; | 120 | |
| 119 | try { | 121 | public abstract boolean isConsistent(); |
| 120 | datafile = file.getCanonicalPath(); | 122 | |
| 121 | } catch (IOException e) { | 123 | public abstract void evaluate(QueryRecord record); |
| 122 | e.printStackTrace(); | 124 | |
| 123 | return; | 125 | public abstract void evaluateUpper(QueryRecord record); |
| 124 | } | 126 | |
| 125 | importDataFile(datafile); | 127 | public AnswerTuples evaluate(String queryText, boolean forFacetGeneration) { |
| 126 | } | 128 | if(isDisposed()) throw new DisposedException(); |
| 127 | 129 | if(forFacetGeneration) { | |
| 128 | protected final void importDataFile(String datafile) { | 130 | QueryRecord record = m_queryManager.create(queryText); |
| 129 | if (importedData.length() == 0) | 131 | Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText); |
| 130 | importedData.append(datafile); | 132 | if(!record.isProcessed()) |
| 131 | else | 133 | evaluateUpper(record); |
| 132 | importedData.append(ImportDataFileSeparator).append(datafile); | ||
| 133 | |||
| 134 | } | ||
| 135 | |||
| 136 | public abstract void loadOntology(OWLOntology ontology); | ||
| 137 | |||
| 138 | public abstract boolean preprocess(); | ||
| 139 | |||
| 140 | public abstract boolean isConsistent(); | ||
| 141 | |||
| 142 | public abstract void evaluate(QueryRecord record); | ||
| 143 | |||
| 144 | public abstract void evaluateUpper(QueryRecord record); | ||
| 145 | |||
| 146 | public AnswerTuples evaluate(String queryText, boolean forFacetGeneration) { | ||
| 147 | if (forFacetGeneration) { | ||
| 148 | QueryRecord record = m_queryManager.create(queryText); | ||
| 149 | Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText); | ||
| 150 | if(!record.isProcessed()) | ||
| 151 | evaluateUpper(record); | ||
| 152 | // AnswerTuples tuples = record.getUpperBoundAnswers(); | 134 | // AnswerTuples tuples = record.getUpperBoundAnswers(); |
| 153 | // for (AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) { | 135 | // for (AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) { |
| 154 | // tuple = tuples.getTuple(); | 136 | // tuple = tuples.getTuple(); |
| 155 | // if (tuple.toString().contains("NC")) | 137 | // if (tuple.toString().contains("NC")) |
| 156 | // System.out.println(tuple.toString()); | 138 | // System.out.println(tuple.toString()); |
| 157 | // } | 139 | // } |
| 158 | return record.getUpperBoundAnswers(); | 140 | return record.getUpperBoundAnswers(); |
| 159 | } else | 141 | } |
| 160 | return evaluate(queryText); | 142 | else |
| 161 | } | 143 | return evaluate(queryText); |
| 162 | 144 | } | |
| 163 | // public void evaluate(Collection<QueryRecord> queryRecords) { | 145 | |
| 164 | // evaluate(queryRecords); | 146 | public AnswerTuples evaluate(String queryText) { |
| 165 | // } | 147 | if(isDisposed()) throw new DisposedException(); |
| 148 | QueryRecord record = m_queryManager.create(queryText); | ||
| 149 | Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); | ||
| 150 | if(!record.isProcessed()) | ||
| 151 | evaluate(record); | ||
| 152 | AnswerTuples answer = record.getAnswers(); | ||
| 153 | record.dispose(); | ||
| 154 | return answer; | ||
| 155 | |||
| 156 | } | ||
| 157 | |||
| 158 | public void evaluate_shell(String queryText) { | ||
| 159 | if(isDisposed()) throw new DisposedException(); | ||
| 160 | QueryRecord record = m_queryManager.create(queryText); | ||
| 161 | Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); | ||
| 162 | if(!record.isProcessed()) | ||
| 163 | evaluate(record); | ||
| 164 | Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple()); | ||
| 165 | record.dispose(); | ||
| 166 | |||
| 167 | } | ||
| 166 | 168 | ||
| 167 | public AnswerTuples evaluate(String queryText) { | 169 | public void evaluate(Collection<QueryRecord> queryRecords) { |
| 168 | QueryRecord record = m_queryManager.create(queryText); | 170 | if(isDisposed()) throw new DisposedException(); |
| 169 | Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); | 171 | if(!isConsistent()) { |
| 170 | if(!record.isProcessed()) | 172 | Utility.logDebug("The ontology and dataset is inconsistent."); |
| 171 | evaluate(record); | 173 | return; |
| 172 | AnswerTuples answer = record.getAnswers(); | 174 | } |
| 173 | record.dispose(); | 175 | |
| 174 | return answer; | 176 | if(properties.getAnswerPath() != null && answerWriter == null) { |
| 175 | 177 | try { | |
| 176 | } | 178 | answerWriter = Files.newBufferedWriter(Paths.get(properties.getAnswerPath())); |
| 177 | 179 | } catch(IOException e) { | |
| 178 | public void evaluate_shell(String queryText) { | 180 | Utility.logError("The answer path is not valid!"); |
| 179 | QueryRecord record = m_queryManager.create(queryText); | 181 | e.printStackTrace(); |
| 180 | Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); | 182 | } |
| 181 | if(!record.isProcessed()) | 183 | } |
| 182 | evaluate(record); | 184 | |
| 183 | Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple()); | 185 | Timer t = new Timer(); |
| 184 | record.dispose(); | 186 | Gson gson = QueryRecord.GsonCreator.getInstance(); |
| 185 | 187 | for(QueryRecord record : queryRecords) { | |
| 186 | } | ||
| 187 | |||
| 188 | public void evaluate(Collection<QueryRecord> queryRecords) { | ||
| 189 | if (!isConsistent()) { | ||
| 190 | Utility.logDebug("The ontology and dataset is inconsistent."); | ||
| 191 | return; | ||
| 192 | } | ||
| 193 | |||
| 194 | if(properties.getAnswerPath() != null && answerWriter == null) { | ||
| 195 | try { | ||
| 196 | answerWriter = Files.newBufferedWriter(Paths.get(properties.getAnswerPath())); | ||
| 197 | } catch (IOException e) { | ||
| 198 | Utility.logError("The answer path is not valid!"); | ||
| 199 | e.printStackTrace(); | ||
| 200 | } | ||
| 201 | } | ||
| 202 | |||
| 203 | Timer t = new Timer(); | ||
| 204 | Gson gson = QueryRecord.GsonCreator.getInstance(); | ||
| 205 | for (QueryRecord record: queryRecords) { | ||
| 206 | // if (Integer.parseInt(record.getQueryID()) != 218) continue; | 188 | // if (Integer.parseInt(record.getQueryID()) != 218) continue; |
| 207 | Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", | 189 | Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", |
| 208 | record.getQueryText()); | 190 | record.getQueryText()); |
| 209 | if(!record.isProcessed()) { | 191 | if(!record.isProcessed()) { |
| 210 | t.reset(); | 192 | t.reset(); |
| 211 | if(!record.isProcessed()) | 193 | if(!record.isProcessed()) |
| 212 | evaluate(record); | 194 | evaluate(record); |
| 213 | Utility.logInfo("Total time to answer this query: " + t.duration()); | 195 | Utility.logInfo("Total time to answer this query: " + t.duration()); |
| 214 | if(!fullReasoner && !record.isProcessed()) { | 196 | if(!fullReasoner && !record.isProcessed()) { |
| 215 | Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds."); | 197 | Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds."); |
| 216 | continue; | 198 | continue; |
| 217 | } | 199 | } |
| 218 | } | 200 | } |
| 219 | record.outputAnswerStatistics(); | 201 | record.outputAnswerStatistics(); |
| 220 | record.outputTimes(); | 202 | record.outputTimes(); |
| 221 | } | 203 | } |
| 222 | /* TODO it can handle one call only | 204 | /* TODO it can handle one call only |
| 223 | if you call twice, you will end up with a json file with multiple roots */ | 205 | if you call twice, you will end up with a json file with multiple roots */ |
| 224 | if(answerWriter != null) gson.toJson(queryRecords, answerWriter); | 206 | if(answerWriter != null) gson.toJson(queryRecords, answerWriter); |
| 225 | // queryRecords.stream().forEach(record -> Utility.logDebug(gson.toJson(record))); | 207 | // queryRecords.stream().forEach(record -> Utility.logDebug(gson.toJson(record))); |
| 226 | queryRecords.stream().forEach(record -> record.dispose()); | 208 | queryRecords.stream().forEach(record -> record.dispose()); |
| 227 | } | 209 | } |
| 228 | 210 | ||
| 229 | public void dispose() { | 211 | // public void evaluate(Collection<QueryRecord> queryRecords) { |
| 230 | if (answerWriter != null) { | 212 | // evaluate(queryRecords); |
| 231 | try { | 213 | // } |
| 232 | answerWriter.close(); | 214 | |
| 233 | } catch (IOException e) { | 215 | @Override |
| 234 | e.printStackTrace(); | 216 | public void dispose() { |
| 235 | } | 217 | super.dispose(); |
| 236 | } | 218 | if(answerWriter != null) { |
| 219 | try { | ||
| 220 | answerWriter.close(); | ||
| 221 | } catch(IOException e) { | ||
| 222 | e.printStackTrace(); | ||
| 223 | } | ||
| 224 | } | ||
| 237 | // Utility.cleanup(); | 225 | // Utility.cleanup(); |
| 238 | } | 226 | } |
| 239 | 227 | ||
| 240 | public QueryManager getQueryManager() { | 228 | public QueryManager getQueryManager() { |
| 241 | return m_queryManager; | 229 | if(isDisposed()) throw new DisposedException(); |
| 242 | } | 230 | return m_queryManager; |
| 231 | } | ||
| 232 | |||
| 233 | private void importDataDirectory(File file) { | ||
| 234 | for(File child : file.listFiles()) | ||
| 235 | if(child.isFile()) importDataFile(child); | ||
| 236 | else importDataDirectory(child); | ||
| 237 | } | ||
| 238 | |||
| 239 | private void importDataFile(File file) { | ||
| 240 | String datafile; | ||
| 241 | try { | ||
| 242 | datafile = file.getCanonicalPath(); | ||
| 243 | } catch(IOException e) { | ||
| 244 | e.printStackTrace(); | ||
| 245 | return; | ||
| 246 | } | ||
| 247 | importDataFile(datafile); | ||
| 248 | } | ||
| 249 | |||
| 250 | protected final void importDataFile(String datafile) { | ||
| 251 | if(importedData.length() == 0) | ||
| 252 | importedData.append(datafile); | ||
| 253 | else | ||
| 254 | importedData.append(ImportDataFileSeparator).append(datafile); | ||
| 255 | |||
| 256 | } | ||
| 243 | 257 | ||
| 244 | 258 | ||
| 245 | public enum Type {Full, RLU, ELHOU} | 259 | public enum Type {Full, RLU, ELHOU} |
| 246 | 260 | ||
| 247 | } | 261 | } |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java index bea5bbf..16e2627 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java | |||
| @@ -9,6 +9,7 @@ import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; | |||
| 9 | import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine; | 9 | import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine; |
| 10 | import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; | 10 | import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; |
| 11 | import uk.ac.ox.cs.pagoda.util.Timer; | 11 | import uk.ac.ox.cs.pagoda.util.Timer; |
| 12 | import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; | ||
| 12 | 13 | ||
| 13 | class RLQueryReasoner extends QueryReasoner { | 14 | class RLQueryReasoner extends QueryReasoner { |
| 14 | 15 | ||
| @@ -16,13 +17,14 @@ class RLQueryReasoner extends QueryReasoner { | |||
| 16 | 17 | ||
| 17 | LowerDatalogProgram program; | 18 | LowerDatalogProgram program; |
| 18 | Timer t = new Timer(); | 19 | Timer t = new Timer(); |
| 19 | 20 | ||
| 20 | public RLQueryReasoner() { | 21 | public RLQueryReasoner() { |
| 21 | rlLowerStore = new BasicQueryEngine("rl"); | 22 | rlLowerStore = new BasicQueryEngine("rl"); |
| 22 | } | 23 | } |
| 23 | 24 | ||
| 24 | @Override | 25 | @Override |
| 25 | public void evaluate(QueryRecord queryRecord) { | 26 | public void evaluate(QueryRecord queryRecord) { |
| 27 | if(isDisposed()) throw new DisposedException(); | ||
| 26 | AnswerTuples rlAnswer = null; | 28 | AnswerTuples rlAnswer = null; |
| 27 | t.reset(); | 29 | t.reset(); |
| 28 | try { | 30 | try { |
| @@ -38,12 +40,13 @@ class RLQueryReasoner extends QueryReasoner { | |||
| 38 | 40 | ||
| 39 | @Override | 41 | @Override |
| 40 | public void dispose() { | 42 | public void dispose() { |
| 41 | if (rlLowerStore != null) rlLowerStore.dispose(); | ||
| 42 | super.dispose(); | 43 | super.dispose(); |
| 44 | if(rlLowerStore != null) rlLowerStore.dispose(); | ||
| 43 | } | 45 | } |
| 44 | 46 | ||
| 45 | @Override | 47 | @Override |
| 46 | public void loadOntology(OWLOntology ontology) { | 48 | public void loadOntology(OWLOntology ontology) { |
| 49 | if(isDisposed()) throw new DisposedException(); | ||
| 47 | program = new LowerDatalogProgram(); | 50 | program = new LowerDatalogProgram(); |
| 48 | program.load(ontology, new UnaryBottom()); | 51 | program.load(ontology, new UnaryBottom()); |
| 49 | program.transform(); | 52 | program.transform(); |
| @@ -53,6 +56,7 @@ class RLQueryReasoner extends QueryReasoner { | |||
| 53 | 56 | ||
| 54 | @Override | 57 | @Override |
| 55 | public boolean preprocess() { | 58 | public boolean preprocess() { |
| 59 | if(isDisposed()) throw new DisposedException(); | ||
| 56 | rlLowerStore.importRDFData("data", importedData.toString()); | 60 | rlLowerStore.importRDFData("data", importedData.toString()); |
| 57 | rlLowerStore.materialise("lower program", program.toString()); | 61 | rlLowerStore.materialise("lower program", program.toString()); |
| 58 | 62 | ||
| @@ -61,6 +65,7 @@ class RLQueryReasoner extends QueryReasoner { | |||
| 61 | 65 | ||
| 62 | @Override | 66 | @Override |
| 63 | public boolean isConsistent() { | 67 | public boolean isConsistent() { |
| 68 | if(isDisposed()) throw new DisposedException(); | ||
| 64 | AnswerTuples ans = null; | 69 | AnswerTuples ans = null; |
| 65 | try { | 70 | try { |
| 66 | ans = rlLowerStore.evaluate(QueryRecord.botQueryText, new String[] {"X"}); | 71 | ans = rlLowerStore.evaluate(QueryRecord.botQueryText, new String[] {"X"}); |
| @@ -74,6 +79,7 @@ class RLQueryReasoner extends QueryReasoner { | |||
| 74 | 79 | ||
| 75 | @Override | 80 | @Override |
| 76 | public void evaluateUpper(QueryRecord record) { | 81 | public void evaluateUpper(QueryRecord record) { |
| 82 | if(isDisposed()) throw new DisposedException(); | ||
| 77 | evaluate(record); | 83 | evaluate(record); |
| 78 | } | 84 | } |
| 79 | 85 | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java index 547140a..d0712e1 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java | |||
| @@ -10,6 +10,7 @@ import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; | |||
| 10 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; | 10 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; |
| 11 | import uk.ac.ox.cs.pagoda.util.Timer; | 11 | import uk.ac.ox.cs.pagoda.util.Timer; |
| 12 | import uk.ac.ox.cs.pagoda.util.Utility; | 12 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 13 | import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; | ||
| 13 | 14 | ||
| 14 | class RLUQueryReasoner extends QueryReasoner { | 15 | class RLUQueryReasoner extends QueryReasoner { |
| 15 | 16 | ||
| @@ -19,7 +20,7 @@ class RLUQueryReasoner extends QueryReasoner { | |||
| 19 | 20 | ||
| 20 | boolean multiStageTag, equalityTag; | 21 | boolean multiStageTag, equalityTag; |
| 21 | Timer t = new Timer(); | 22 | Timer t = new Timer(); |
| 22 | 23 | ||
| 23 | public RLUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { | 24 | public RLUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { |
| 24 | this.multiStageTag = multiStageTag; | 25 | this.multiStageTag = multiStageTag; |
| 25 | this.equalityTag = considerEqualities; | 26 | this.equalityTag = considerEqualities; |
| @@ -32,6 +33,7 @@ class RLUQueryReasoner extends QueryReasoner { | |||
| 32 | 33 | ||
| 33 | @Override | 34 | @Override |
| 34 | public void evaluate(QueryRecord queryRecord) { | 35 | public void evaluate(QueryRecord queryRecord) { |
| 36 | if(isDisposed()) throw new DisposedException(); | ||
| 35 | AnswerTuples ans = null; | 37 | AnswerTuples ans = null; |
| 36 | t.reset(); | 38 | t.reset(); |
| 37 | try { | 39 | try { |
| @@ -60,6 +62,7 @@ class RLUQueryReasoner extends QueryReasoner { | |||
| 60 | 62 | ||
| 61 | @Override | 63 | @Override |
| 62 | public void evaluateUpper(QueryRecord queryRecord) { | 64 | public void evaluateUpper(QueryRecord queryRecord) { |
| 65 | if(isDisposed()) throw new DisposedException(); | ||
| 63 | AnswerTuples ans = null; | 66 | AnswerTuples ans = null; |
| 64 | try { | 67 | try { |
| 65 | ans = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | 68 | ans = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); |
| @@ -72,13 +75,14 @@ class RLUQueryReasoner extends QueryReasoner { | |||
| 72 | 75 | ||
| 73 | @Override | 76 | @Override |
| 74 | public void dispose() { | 77 | public void dispose() { |
| 78 | super.dispose(); | ||
| 75 | if (rlLowerStore != null) rlLowerStore.dispose(); | 79 | if (rlLowerStore != null) rlLowerStore.dispose(); |
| 76 | if (rlUpperStore != null) rlUpperStore.dispose(); | 80 | if (rlUpperStore != null) rlUpperStore.dispose(); |
| 77 | super.dispose(); | ||
| 78 | } | 81 | } |
| 79 | 82 | ||
| 80 | @Override | 83 | @Override |
| 81 | public void loadOntology(OWLOntology o) { | 84 | public void loadOntology(OWLOntology o) { |
| 85 | if(isDisposed()) throw new DisposedException(); | ||
| 82 | if (!equalityTag) { | 86 | if (!equalityTag) { |
| 83 | EqualitiesEliminator eliminator = new EqualitiesEliminator(o); | 87 | EqualitiesEliminator eliminator = new EqualitiesEliminator(o); |
| 84 | o = eliminator.getOutputOntology(); | 88 | o = eliminator.getOutputOntology(); |
| @@ -92,6 +96,7 @@ class RLUQueryReasoner extends QueryReasoner { | |||
| 92 | 96 | ||
| 93 | @Override | 97 | @Override |
| 94 | public boolean preprocess() { | 98 | public boolean preprocess() { |
| 99 | if(isDisposed()) throw new DisposedException(); | ||
| 95 | String datafile = importedData.toString(); | 100 | String datafile = importedData.toString(); |
| 96 | rlLowerStore.importRDFData("data", datafile); | 101 | rlLowerStore.importRDFData("data", datafile); |
| 97 | rlLowerStore.materialise("lower program", program.getLower().toString()); | 102 | rlLowerStore.materialise("lower program", program.getLower().toString()); |
| @@ -105,6 +110,7 @@ class RLUQueryReasoner extends QueryReasoner { | |||
| 105 | 110 | ||
| 106 | @Override | 111 | @Override |
| 107 | public boolean isConsistent() { | 112 | public boolean isConsistent() { |
| 113 | if(isDisposed()) throw new DisposedException(); | ||
| 108 | String[] X = new String[] { "X" }; | 114 | String[] X = new String[] { "X" }; |
| 109 | AnswerTuples ans = null; | 115 | AnswerTuples ans = null; |
| 110 | try { | 116 | try { |
| @@ -125,6 +131,5 @@ class RLUQueryReasoner extends QueryReasoner { | |||
| 125 | Utility.logDebug("The consistency of the data has not been determined yet."); | 131 | Utility.logDebug("The consistency of the data has not been determined yet."); |
| 126 | return true; | 132 | return true; |
| 127 | } | 133 | } |
| 128 | 134 | ||
| 129 | |||
| 130 | } | 135 | } |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java index 79be8aa..fe43e09 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java | |||
| @@ -1,375 +1,377 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.light; | 1 | package uk.ac.ox.cs.pagoda.reasoner.light; |
| 2 | 2 | ||
| 3 | import java.util.Arrays; | ||
| 4 | import java.util.Collection; | ||
| 5 | import java.util.HashSet; | ||
| 6 | import java.util.Iterator; | ||
| 7 | import java.util.Set; | ||
| 8 | |||
| 9 | import org.semanticweb.HermiT.model.DLClause; | 3 | import org.semanticweb.HermiT.model.DLClause; |
| 10 | 4 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | |
| 5 | import uk.ac.ox.cs.JRDFox.store.DataStore; | ||
| 6 | import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType; | ||
| 7 | import uk.ac.ox.cs.JRDFox.store.Parameters; | ||
| 8 | import uk.ac.ox.cs.JRDFox.store.TripleStatus; | ||
| 9 | import uk.ac.ox.cs.JRDFox.store.TupleIterator; | ||
| 11 | import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; | 10 | import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; |
| 12 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | 11 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; |
| 13 | import uk.ac.ox.cs.pagoda.query.GapByStore4ID; | 12 | import uk.ac.ox.cs.pagoda.query.GapByStore4ID; |
| 14 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; | 13 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; |
| 15 | import uk.ac.ox.cs.pagoda.rules.Program; | 14 | import uk.ac.ox.cs.pagoda.rules.Program; |
| 16 | import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; | 15 | import uk.ac.ox.cs.pagoda.util.*; |
| 17 | import uk.ac.ox.cs.pagoda.util.Namespace; | ||
| 18 | import uk.ac.ox.cs.pagoda.util.Timer; | 16 | import uk.ac.ox.cs.pagoda.util.Timer; |
| 19 | import uk.ac.ox.cs.pagoda.util.UFS; | 17 | import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; |
| 20 | import uk.ac.ox.cs.pagoda.util.Utility; | 18 | |
| 21 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | 19 | import java.util.*; |
| 22 | import uk.ac.ox.cs.JRDFox.store.DataStore; | ||
| 23 | import uk.ac.ox.cs.JRDFox.store.Parameters; | ||
| 24 | import uk.ac.ox.cs.JRDFox.store.TripleStatus; | ||
| 25 | import uk.ac.ox.cs.JRDFox.store.TupleIterator; | ||
| 26 | import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType; | ||
| 27 | 20 | ||
| 28 | public class BasicQueryEngine extends RDFoxQueryEngine { | 21 | public class BasicQueryEngine extends RDFoxQueryEngine { |
| 29 | 22 | ||
| 30 | protected DataStore store; | 23 | protected DataStore store; |
| 31 | protected Parameters parameters = new Parameters(); | 24 | protected Parameters parameters = new Parameters(); |
| 32 | 25 | Set<DLClause> materialisedRules = new HashSet<DLClause>(); | |
| 33 | public BasicQueryEngine(String name) { | 26 | private UFS<String> equalityGroups = null; |
| 34 | super(name); | 27 | |
| 35 | store = RDFoxQueryEngine.createDataStore(); | 28 | public BasicQueryEngine(String name) { |
| 36 | parameters.m_allAnswersInRoot = true; | 29 | super(name); |
| 37 | parameters.m_useBushy = true; | 30 | store = RDFoxQueryEngine.createDataStore(); |
| 38 | } | 31 | parameters.m_allAnswersInRoot = true; |
| 39 | 32 | parameters.m_useBushy = true; | |
| 40 | public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) { | 33 | } |
| 41 | if (gap != null) { | 34 | |
| 42 | materialise("lower program", dProgram.getLower().toString()); | 35 | public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) { |
| 43 | String program = dProgram.getUpper().toString(); | 36 | if(isDisposed()) throw new DisposedException(); |
| 44 | try { | 37 | if(gap != null) { |
| 45 | gap.compile(program); | 38 | materialise("lower program", dProgram.getLower().toString()); |
| 46 | gap.addBackTo(); | 39 | String program = dProgram.getUpper().toString(); |
| 47 | getDataStore().clearRulesAndMakeFactsExplicit(); | 40 | try { |
| 48 | } catch (JRDFStoreException e) { | 41 | gap.compile(program); |
| 49 | e.printStackTrace(); | 42 | gap.addBackTo(); |
| 50 | } finally { | 43 | getDataStore().clearRulesAndMakeFactsExplicit(); |
| 51 | gap.clear(); | 44 | } catch(JRDFStoreException e) { |
| 52 | } | 45 | e.printStackTrace(); |
| 53 | } | 46 | } finally { |
| 54 | else | 47 | gap.clear(); |
| 55 | materialise("upper program", dProgram.getUpper().toString()); | 48 | } |
| 56 | } | 49 | } |
| 57 | 50 | else | |
| 58 | public int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap) { | 51 | materialise("upper program", dProgram.getUpper().toString()); |
| 59 | if (gap != null) { | 52 | } |
| 60 | materialise("lower program", dProgram.getLower().toString()); | 53 | |
| 61 | String program = dProgram.getUpper().toString(); | 54 | public int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap) { |
| 62 | try { | 55 | if(isDisposed()) throw new DisposedException(); |
| 63 | gap.compile(program); | 56 | if(gap != null) { |
| 64 | gap.addBackTo(); | 57 | materialise("lower program", dProgram.getLower().toString()); |
| 65 | getDataStore().clearRulesAndMakeFactsExplicit(); | 58 | String program = dProgram.getUpper().toString(); |
| 66 | } catch (JRDFStoreException e) { | 59 | try { |
| 67 | e.printStackTrace(); | 60 | gap.compile(program); |
| 68 | } finally { | 61 | gap.addBackTo(); |
| 69 | gap.clear(); | 62 | getDataStore().clearRulesAndMakeFactsExplicit(); |
| 70 | } | 63 | } catch(JRDFStoreException e) { |
| 71 | } | 64 | e.printStackTrace(); |
| 72 | else | 65 | } finally { |
| 73 | materialise("upper program", dProgram.getUpper().toString()); | 66 | gap.clear(); |
| 74 | 67 | } | |
| 75 | return 1; | 68 | } |
| 76 | } | 69 | else |
| 77 | 70 | materialise("upper program", dProgram.getUpper().toString()); | |
| 78 | @Override | 71 | |
| 79 | public AnswerTuples evaluate(String queryText) { | 72 | return 1; |
| 80 | return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0]); | 73 | } |
| 81 | } | 74 | |
| 82 | 75 | @Override | |
| 83 | @Override | 76 | public AnswerTuples evaluate(String queryText) { |
| 84 | public AnswerTuples evaluate(String queryText, String[] answerVars) { | 77 | if(isDisposed()) throw new DisposedException(); |
| 85 | TupleIterator tupleIterator; | 78 | return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0]); |
| 86 | try { | 79 | } |
| 87 | tupleIterator = store.compileQuery(queryText.replace("_:", "?"), prefixes, parameters); | 80 | |
| 88 | } catch (JRDFStoreException e) { | 81 | @Override |
| 89 | e.printStackTrace(); | 82 | public AnswerTuples evaluate(String queryText, String[] answerVars) { |
| 90 | return null; | 83 | if(isDisposed()) throw new DisposedException(); |
| 91 | } | 84 | TupleIterator tupleIterator; |
| 92 | return new RDFoxAnswerTuples(answerVars, tupleIterator); | 85 | try { |
| 93 | } | 86 | tupleIterator = store.compileQuery(queryText.replace("_:", "?"), prefixes, parameters); |
| 94 | 87 | } catch(JRDFStoreException e) { | |
| 95 | @Override | 88 | e.printStackTrace(); |
| 96 | public DataStore getDataStore() { | 89 | return null; |
| 97 | return store; | 90 | } |
| 98 | } | 91 | return new RDFoxAnswerTuples(answerVars, tupleIterator); |
| 99 | 92 | } | |
| 100 | @Override | 93 | |
| 101 | public void dispose() { | 94 | @Override |
| 102 | store.dispose(); | 95 | public DataStore getDataStore() { |
| 103 | } | 96 | if(isDisposed()) throw new DisposedException(); |
| 104 | 97 | return store; | |
| 105 | protected void outputClassAssertions(String filename) { | 98 | } |
| 106 | TupleIterator allTuples = null; | 99 | |
| 107 | boolean redirect = false; | 100 | @Override |
| 108 | try { | 101 | public void dispose() { |
| 109 | allTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Z }", prefixes, parameters); | 102 | super.dispose(); |
| 110 | redirect = Utility.redirectCurrentOut(filename); | 103 | store.dispose(); |
| 111 | for (long multi = allTuples.open(); multi != 0; multi = allTuples.getNext()) | 104 | } |
| 112 | System.out.println(RDFoxTripleManager.getQuotedTerm(allTuples.getResource(0)) + " " + RDFoxTripleManager.getQuotedTerm(allTuples.getResource(1))); | 105 | |
| 113 | } catch (JRDFStoreException e) { | 106 | public void outputInstance4BinaryPredicate(String iri, String filename) { |
| 114 | e.printStackTrace(); | 107 | Utility.redirectCurrentOut(filename); |
| 115 | } finally { | 108 | outputInstance4BinaryPredicate(iri); |
| 116 | if (redirect) Utility.closeCurrentOut(); | 109 | Utility.closeCurrentOut(); |
| 117 | if (allTuples != null) allTuples.dispose(); | 110 | } |
| 118 | } | 111 | |
| 119 | } | 112 | public void outputInstance4BinaryPredicate(String iri) { |
| 120 | 113 | outputAnswers("select ?x ?y where { ?x <" + iri + "> ?y . }"); | |
| 121 | public void outputInstance4BinaryPredicate(String iri, String filename) { | 114 | } |
| 122 | Utility.redirectCurrentOut(filename); | 115 | |
| 123 | outputInstance4BinaryPredicate(iri); | 116 | public void outputInstanceNumbers(String filename) { |
| 124 | Utility.closeCurrentOut(); | 117 | TupleIterator predicateTuples = null; |
| 125 | } | 118 | TupleIterator instanceTuples; |
| 126 | 119 | Set<String> number = new HashSet<String>(); | |
| 127 | public void outputInstance4BinaryPredicate(String iri) { | 120 | String predicate; |
| 128 | outputAnswers("select ?x ?y where { ?x <" + iri + "> ?y . }"); | 121 | try { |
| 129 | } | 122 | predicateTuples = |
| 130 | 123 | getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Y }", prefixes, parameters); | |
| 131 | public void outputInstanceNumbers(String filename) { | 124 | for(long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { |
| 132 | TupleIterator predicateTuples = null; | 125 | predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); |
| 133 | TupleIterator instanceTuples; | 126 | instanceTuples = null; |
| 134 | Set<String> number = new HashSet<String>(); | 127 | try { |
| 135 | String predicate; | 128 | instanceTuples = |
| 136 | try { | 129 | getDataStore().compileQuery("SELECT ?X WHERE { ?X <" + Namespace.RDF_TYPE + "> " + predicate + " }", prefixes, parameters); |
| 137 | predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Y }", prefixes, parameters); | 130 | long totalCount = 0; |
| 138 | for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { | 131 | for(long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) { |
| 139 | predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); | 132 | totalCount += instanceTuples.getMultiplicity(); |
| 140 | instanceTuples = null; | 133 | } |
| 141 | try { | 134 | number.add(predicate + " * " + totalCount); |
| 142 | instanceTuples = getDataStore().compileQuery("SELECT ?X WHERE { ?X <" + Namespace.RDF_TYPE + "> " + predicate + " }", prefixes, parameters); | 135 | } finally { |
| 143 | long totalCount = 0; | 136 | if(instanceTuples != null) instanceTuples.dispose(); |
| 144 | for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) { | 137 | } |
| 145 | totalCount += instanceTuples.getMultiplicity(); | 138 | } |
| 146 | } | 139 | } catch(JRDFStoreException e) { |
| 147 | number.add(predicate + " * " + totalCount); | 140 | e.printStackTrace(); |
| 148 | } finally { | 141 | } finally { |
| 149 | if (instanceTuples != null) instanceTuples.dispose(); | 142 | if(predicateTuples != null) predicateTuples.dispose(); |
| 150 | } | 143 | predicateTuples = null; |
| 151 | } | 144 | } |
| 152 | } catch (JRDFStoreException e) { | 145 | |
| 153 | e.printStackTrace(); | 146 | try { |
| 154 | } finally { | 147 | predicateTuples = |
| 155 | if (predicateTuples != null) predicateTuples.dispose(); | 148 | getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X ?Y ?Z }", prefixes, parameters); |
| 156 | predicateTuples = null; | 149 | for(long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { |
| 157 | } | 150 | predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); |
| 158 | 151 | instanceTuples = null; | |
| 159 | try { | 152 | try { |
| 160 | predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X ?Y ?Z }", prefixes, parameters); | 153 | instanceTuples = |
| 161 | for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { | 154 | getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X " + predicate + " ?Z }", prefixes, parameters); |
| 162 | predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); | 155 | long totalCount = 0; |
| 163 | instanceTuples = null; | 156 | for(long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) |
| 164 | try { | 157 | totalCount += instanceTuples.getMultiplicity(); |
| 165 | instanceTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X " + predicate + " ?Z }", prefixes, parameters); | 158 | number.add(predicate + " * " + totalCount); |
| 166 | ; | 159 | } finally { |
| 167 | long totalCount = 0; | 160 | if(instanceTuples != null) instanceTuples.dispose(); |
| 168 | for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) | 161 | } |
| 169 | totalCount += instanceTuples.getMultiplicity(); | 162 | } |
| 170 | number.add(predicate + " * " + totalCount); | 163 | |
| 171 | } finally { | 164 | } catch(JRDFStoreException e) { |
| 172 | if (instanceTuples != null) instanceTuples.dispose(); | 165 | e.printStackTrace(); |
| 173 | } | 166 | } finally { |
| 174 | } | 167 | if(predicateTuples != null) predicateTuples.dispose(); |
| 175 | 168 | predicateTuples = null; | |
| 176 | } catch (JRDFStoreException e) { | 169 | } |
| 177 | e.printStackTrace(); | 170 | |
| 178 | } finally { | 171 | Utility.redirectCurrentOut(filename); |
| 179 | if (predicateTuples != null) predicateTuples.dispose(); | 172 | String[] ordered = number.toArray(new String[0]); |
| 180 | predicateTuples = null; | 173 | Arrays.sort(ordered, new DLPredicateComparator()); |
| 181 | } | 174 | for(String line : ordered) System.out.println(line); |
| 182 | 175 | Utility.closeCurrentOut(); | |
| 183 | Utility.redirectCurrentOut(filename); | 176 | |
| 184 | String[] ordered = number.toArray(new String[0]); | 177 | } |
| 185 | Arrays.sort(ordered, new DLPredicateComparator()); | 178 | |
| 186 | for (String line: ordered) System.out.println(line); | 179 | public TupleIterator internal_evaluateAgainstIDBs(String queryText) throws JRDFStoreException { |
| 187 | Utility.closeCurrentOut(); | 180 | TupleIterator iter = |
| 188 | 181 | store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB); | |
| 189 | } | 182 | // iter.open(); |
| 190 | 183 | return iter; | |
| 191 | public TupleIterator internal_evaluateAgainstIDBs(String queryText) throws JRDFStoreException { | 184 | } |
| 192 | TupleIterator iter = store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB); | 185 | |
| 186 | public TupleIterator internal_evaluate(String queryText) throws JRDFStoreException { | ||
| 187 | TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); | ||
| 188 | // iter.open(); | ||
| 189 | return iter; | ||
| 190 | } | ||
| 191 | |||
| 192 | public void setExpandEquality(boolean flag) { | ||
| 193 | parameters.m_expandEquality = flag; | ||
| 194 | } | ||
| 195 | |||
| 196 | public TupleIterator internal_evaluateNotExpanded(String queryText) throws JRDFStoreException { | ||
| 197 | parameters.m_expandEquality = false; | ||
| 198 | TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); | ||
| 193 | // iter.open(); | 199 | // iter.open(); |
| 194 | return iter; | 200 | parameters.m_expandEquality = true; |
| 195 | } | 201 | return iter; |
| 196 | 202 | } | |
| 197 | public TupleIterator internal_evaluate(String queryText) throws JRDFStoreException { | 203 | |
| 198 | TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); | 204 | public TupleIterator internal_evaluate(String queryText, boolean incrementally) throws JRDFStoreException { |
| 199 | // iter.open(); | 205 | return incrementally ? internal_evaluateAgainstIDBs(queryText) : internal_evaluate(queryText); |
| 200 | return iter; | 206 | } |
| 201 | } | 207 | |
| 202 | 208 | public String getUnusedRules(Collection<DLClause> clauses, boolean toUpdate) { | |
| 203 | public void setExpandEquality(boolean flag) { | 209 | DLClause clause; |
| 204 | parameters.m_expandEquality = flag; | 210 | for(Iterator<DLClause> iter = clauses.iterator(); iter.hasNext(); ) { |
| 205 | } | 211 | if(materialisedRules.contains(clause = iter.next())) |
| 206 | 212 | iter.remove(); | |
| 207 | public TupleIterator internal_evaluateNotExpanded(String queryText) throws JRDFStoreException { | 213 | else if(toUpdate) materialisedRules.add(clause); |
| 208 | parameters.m_expandEquality = false; | 214 | } |
| 209 | TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); | 215 | |
| 210 | // iter.open(); | 216 | if(clauses.isEmpty()) return null; |
| 211 | parameters.m_expandEquality = true; | 217 | |
| 212 | return iter; | 218 | return Program.toString(clauses); |
| 213 | } | 219 | } |
| 214 | 220 | ||
| 215 | 221 | public void outputMaterialisedRules() { | |
| 216 | public TupleIterator internal_evaluate(String queryText, boolean incrementally) throws JRDFStoreException { | 222 | System.out.println(DLClauseHelper.toString(materialisedRules)); |
| 217 | return incrementally ? internal_evaluateAgainstIDBs(queryText) : internal_evaluate(queryText); | 223 | } |
| 218 | } | 224 | |
| 219 | 225 | public void outputAnswers(String query) { | |
| 220 | Set<DLClause> materialisedRules = new HashSet<DLClause>(); | 226 | TupleIterator iter = null; |
| 221 | 227 | try { | |
| 222 | public String getUnusedRules(Collection<DLClause> clauses, boolean toUpdate) { | 228 | iter = internal_evaluate(query); |
| 223 | DLClause clause; | 229 | System.out.println(query); |
| 224 | for (Iterator<DLClause> iter = clauses.iterator(); iter.hasNext(); ) { | 230 | int arity = iter.getArity(); |
| 225 | if (materialisedRules.contains(clause = iter.next())) | 231 | for(long multi = iter.open(); multi != 0; multi = iter.getNext()) { |
| 226 | iter.remove(); | 232 | for(int i = 0; i < arity; ++i) |
| 227 | else if (toUpdate) materialisedRules.add(clause); | 233 | System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); |
| 228 | } | 234 | System.out.println(); |
| 229 | 235 | } | |
| 230 | if (clauses.isEmpty()) return null; | 236 | } catch(JRDFStoreException e) { |
| 231 | 237 | e.printStackTrace(); | |
| 232 | return Program.toString(clauses); | 238 | } finally { |
| 233 | } | 239 | if(iter != null) iter.dispose(); |
| 234 | 240 | } | |
| 235 | public void outputMaterialisedRules() { | 241 | } |
| 236 | System.out.println(DLClauseHelper.toString(materialisedRules)); | 242 | |
| 237 | } | 243 | public void outputInstance4UnaryPredicate(String iri) { |
| 238 | 244 | outputAnswers("select ?x where { ?x " | |
| 239 | public void outputAnswers(String query) { | 245 | + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <" |
| 240 | TupleIterator iter = null; | 246 | + iri |
| 241 | try { | 247 | + "> .}"); |
| 242 | iter = internal_evaluate(query); | 248 | } |
| 243 | System.out.println(query); | 249 | |
| 244 | int arity = iter.getArity(); | 250 | public void outputSubjects(String p, String o) { |
| 245 | for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { | 251 | outputAnswers("select ?x where { ?x <" + p + "> <" + o + "> . }"); |
| 246 | for (int i = 0; i < arity; ++i) | 252 | } |
| 247 | System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); | 253 | |
| 248 | System.out.println(); | 254 | public void outputObjects(String s, String p) { |
| 249 | } | 255 | outputAnswers("select ?x where { <" + s + "> <" + p + "> ?x . }"); |
| 250 | } catch (JRDFStoreException e) { | 256 | } |
| 251 | e.printStackTrace(); | 257 | |
| 252 | } finally { | 258 | public void outputIDBFacts() { |
| 253 | if (iter != null) iter.dispose(); | 259 | TupleIterator iter = null; |
| 254 | } | 260 | try { |
| 255 | } | 261 | iter = internal_evaluateAgainstIDBs("select distinct ?x ?y ?z where { ?x ?y ?z }"); |
| 256 | 262 | for(long multi = iter.open(); multi != 0; multi = iter.getNext()) { | |
| 257 | public void outputInstance4UnaryPredicate(String iri) { | 263 | for(int i = 0; i < 3; ++i) |
| 258 | outputAnswers("select ?x where { ?x " | 264 | System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); |
| 259 | + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <" | 265 | System.out.println(); |
| 260 | + iri | 266 | } |
| 261 | + "> .}"); | 267 | } catch(JRDFStoreException e) { |
| 262 | } | 268 | // TODO Auto-generated catch block |
| 263 | 269 | e.printStackTrace(); | |
| 264 | public void outputSubjects(String p, String o) { | 270 | } finally { |
| 265 | outputAnswers("select ?x where { ?x <" + p + "> <" + o + "> . }"); | 252 | } |
| 266 | } | 272 | } |
| 267 | 273 | ||
| 268 | public void outputObjects(String s, String p) { | 274 | } |
| 269 | outputAnswers("select ?x where { <" + s + "> <" + p + "> ?x . }"); | 275 | |
| 270 | } | 276 | public void outputType4Individual(String iri) { |
| 271 | 277 | outputAnswers("select ?z where { <" + iri + "> " + Namespace.RDF_TYPE_QUOTED + " ?z }"); | |
| 272 | public void outputIDBFacts() { | 278 | } |
| 273 | TupleIterator iter = null; | 279 | |
| 274 | try { | 280 | public int getSameAsNumber() { |
| 275 | iter = internal_evaluateAgainstIDBs("select distinct ?x ?y ?z where { ?x ?y ?z }"); | 281 | TupleIterator iter = null; |
| 276 | for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { | 282 | int counter = 0; |
| 277 | for (int i = 0; i < 3; ++i) | 283 | try { |
| 278 | System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); | 284 | iter = internal_evaluate("select ?x ?y where {?x " + Namespace.EQUALITY_QUOTED + " ?y . }"); |
| 279 | System.out.println(); | 285 | for(long multi = iter.open(); multi != 0; multi = iter.getNext()) |
| 280 | } | 286 | if(iter.getResourceID(0) != iter.getResourceID(1)) |
| 281 | } catch (JRDFStoreException e) { | 287 | ++counter; |
| 282 | // TODO Auto-generated catch block | 288 | } catch(JRDFStoreException e) { |
| 283 | e.printStackTrace(); | 289 | e.printStackTrace(); |
| 284 | } finally { | 290 | } finally { |
| 285 | if (iter != null) iter.dispose(); | 291 | if(iter != null) iter.dispose(); |
| 286 | } | 292 | } |
| 287 | 293 | return counter; | |
| 288 | } | 294 | } |
| 289 | 295 | ||
| 290 | public void outputType4Individual(String iri) { | 296 | public UFS<String> getEqualityGroups(boolean reuse) { |
| 291 | outputAnswers("select ?z where { <" + iri + "> " + Namespace.RDF_TYPE_QUOTED + " ?z }"); | 297 | if(reuse && equalityGroups != null) return equalityGroups; |
| 292 | } | 298 | |
| 293 | 299 | equalityGroups = new UFS<String>(); | |
| 294 | public int getSameAsNumber() { | 300 | |
| 295 | TupleIterator iter = null; | 301 | TupleIterator answers = null; |
| 296 | int counter = 0; | 302 | try { |
| 297 | try { | 303 | Timer t = new Timer(); |
| 298 | iter = internal_evaluate("select ?x ?y where {?x " + Namespace.EQUALITY_QUOTED + " ?y . }"); | 304 | answers = internal_evaluate("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + "?z . }"); |
| 299 | for (long multi = iter.open(); multi != 0; multi = iter.getNext()) | 305 | for(long multi = answers.open(); multi != 0; multi = answers.getNext()) { |
| 300 | if (iter.getResourceID(0) != iter.getResourceID(1)) | 306 | if(answers.getResourceID(0) != answers.getResourceID(1)) |
| 301 | ++counter; | 307 | equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm); |
| 302 | } catch (JRDFStoreException e) { | 308 | } |
| 303 | e.printStackTrace(); | 309 | Utility.logInfo("@Time to group individuals by equality: " + t.duration()); |
| 304 | } finally { | 310 | } catch(JRDFStoreException e) { |
| 305 | if (iter != null) iter.dispose(); | 311 | e.printStackTrace(); |
| 306 | } | 312 | } finally { |
| 307 | return counter; | 313 | if(answers != null) answers.dispose(); |
| 308 | } | 314 | } |
| 309 | 315 | ||
| 310 | private UFS<String> equalityGroups = null; | 316 | return equalityGroups; |
| 311 | 317 | } | |
| 312 | public UFS<String> getEqualityGroups(boolean reuse) { | 318 | |
| 313 | if (reuse && equalityGroups != null) return equalityGroups; | 319 | public void clearRulesAndIDBFacts(Collection<int[]> collection) { |
| 314 | 320 | // performDeletion(collection); | |
| 315 | equalityGroups = new UFS<String>(); | 321 | collection.clear(); |
| 316 | 322 | try { | |
| 317 | TupleIterator answers = null; | 323 | store.clearRulesAndMakeFactsExplicit(); |
| 318 | try { | 324 | } catch(JRDFStoreException e) { |
| 319 | Timer t = new Timer(); | 325 | e.printStackTrace(); |
| 320 | answers = internal_evaluate("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + "?z . }"); | 326 | } |
| 321 | for (long multi = answers.open(); multi != 0; multi = answers.getNext()) { | 327 | } |
| 322 | if (answers.getResourceID(0) != answers.getResourceID(1)) | 328 | |
| 323 | equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm); | 329 | protected void outputClassAssertions(String filename) { |
| 324 | } | 330 | TupleIterator allTuples = null; |
| 325 | Utility.logInfo("@Time to group individuals by equality: " + t.duration()); | 331 | boolean redirect = false; |
| 326 | } catch (JRDFStoreException e) { | 332 | try { |
| 327 | e.printStackTrace(); | 333 | allTuples = |
| 328 | } finally { | 334 | getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Z }", prefixes, parameters); |
| 329 | if (answers != null) answers.dispose(); | 335 | redirect = Utility.redirectCurrentOut(filename); |
| 330 | } | 336 | for(long multi = allTuples.open(); multi != 0; multi = allTuples.getNext()) |
| 331 | 337 | System.out.println(RDFoxTripleManager.getQuotedTerm(allTuples.getResource(0)) + " " + RDFoxTripleManager | |
| 332 | return equalityGroups; | 338 | .getQuotedTerm(allTuples.getResource(1))); |
| 333 | } | 339 | } catch(JRDFStoreException e) { |
| 334 | 340 | e.printStackTrace(); | |
| 335 | public void clearRulesAndIDBFacts(Collection<int[]> collection) { | 341 | } finally { |
| 336 | // performDeletion(collection); | 342 | if(redirect) Utility.closeCurrentOut(); |
| 337 | collection.clear(); | 343 | if(allTuples != null) allTuples.dispose(); |
| 338 | try { | 344 | } |
| 339 | store.clearRulesAndMakeFactsExplicit(); | 345 | } |
| 340 | } catch (JRDFStoreException e) { | 346 | |
| 341 | e.printStackTrace(); | 347 | @SuppressWarnings("unused") |
| 342 | } | 348 | private void performDeletion(Collection<int[]> collection) { |
| 343 | } | 349 | Utility.logInfo("Remove all rules, idb facts and added stuff..."); |
| 344 | 350 | Timer timer = new Timer(); | |
| 345 | @SuppressWarnings("unused") | 351 | TupleIterator iter = null; |
| 346 | private void performDeletion(Collection<int[]> collection) { | 352 | try { |
| 347 | Utility.logInfo("Remove all rules, idb facts and added stuff..."); | 353 | UpdateType ut = UpdateType.ScheduleForDeletion; |
| 348 | Timer timer = new Timer(); | 354 | for(int[] t : collection) |
| 349 | TupleIterator iter = null; | 355 | store.addTriplesByResourceIDs(t, ut); |
| 350 | try { | 356 | |
| 351 | UpdateType ut = UpdateType.ScheduleForDeletion; | 357 | try { |
| 352 | for (int[] t: collection) | 358 | iter = internal_evaluateAgainstIDBs("select ?x ?y ?z where { ?x ?y ?z . }"); |
| 353 | store.addTriplesByResourceIDs(t, ut); | 359 | for(long multi = iter.open(); multi != 0; multi = iter.getNext()) { |
| 354 | 360 | int[] triple = new int[3]; | |
| 355 | try { | 361 | for(int i = 0; i < 3; ++i) |
| 356 | iter = internal_evaluateAgainstIDBs("select ?x ?y ?z where { ?x ?y ?z . }"); | 362 | triple[i] = iter.getResourceID(i); |
| 357 | for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { | 363 | store.addTriplesByResourceIDs(triple, ut); |
| 358 | int[] triple = new int[3]; | 364 | } |
| 359 | for (int i = 0; i < 3; ++i) | 365 | } finally { |
| 360 | triple[i] = iter.getResourceID(i); | 366 | if(iter != null) iter.dispose(); |
| 361 | store.addTriplesByResourceIDs(triple, ut); | 367 | iter = null; |
| 362 | } | 368 | } |
| 363 | } finally { | 369 | store.applyReasoning(true); |
| 364 | if (iter != null) iter.dispose(); | 370 | } catch(JRDFStoreException e) { |
| 365 | iter = null; | 371 | e.printStackTrace(); |
| 366 | } | 372 | } |
| 367 | store.applyReasoning(true); | 373 | Utility.logInfo("Time for deletion: " + timer.duration()); |
| 368 | } catch (JRDFStoreException e) { | 374 | } |
| 369 | e.printStackTrace(); | ||
| 370 | } | ||
| 371 | Utility.logInfo("Time for deletion: " + timer.duration()); | ||
| 372 | } | ||
| 373 | 375 | ||
| 374 | 376 | ||
| 375 | } | 377 | } |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java index f068164..98f0c35 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java | |||
| @@ -12,6 +12,7 @@ import uk.ac.ox.cs.pagoda.query.AnswerTuples; | |||
| 12 | import uk.ac.ox.cs.pagoda.query.AnswerTuplesImp; | 12 | import uk.ac.ox.cs.pagoda.query.AnswerTuplesImp; |
| 13 | import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; | 13 | import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; |
| 14 | import uk.ac.ox.cs.pagoda.util.Utility; | 14 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 15 | import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; | ||
| 15 | 16 | ||
| 16 | import java.io.File; | 17 | import java.io.File; |
| 17 | import java.io.FileNotFoundException; | 18 | import java.io.FileNotFoundException; |
| @@ -19,82 +20,90 @@ import java.nio.file.Paths; | |||
| 19 | import java.util.Set; | 20 | import java.util.Set; |
| 20 | 21 | ||
| 21 | public class KarmaQueryEngine extends RDFoxQueryEngine { | 22 | public class KarmaQueryEngine extends RDFoxQueryEngine { |
| 22 | 23 | ||
| 23 | private MyKarma reasoner = null; | 24 | String karmaDataFile = null, karmaRuleFile = null; |
| 24 | 25 | private MyKarma reasoner = null; | |
| 25 | String karmaDataFile = null, karmaRuleFile = null; | 26 | |
| 26 | 27 | public KarmaQueryEngine(String name) { | |
| 27 | public KarmaQueryEngine(String name) { | 28 | super(name); |
| 28 | super(name); | 29 | |
| 29 | |||
| 30 | // int Base = 1 << 6; | 30 | // int Base = 1 << 6; |
| 31 | // int index = (new Random().nextInt() % Base + Base) % Base; | 31 | // int index = (new Random().nextInt() % Base + Base) % Base; |
| 32 | // karmaDataFile = "karma_data" + index + ".ttl"; | 32 | // karmaDataFile = "karma_data" + index + ".ttl"; |
| 33 | // karmaRuleFile = "karma_rule" + index + ".dlog"; | 33 | // karmaRuleFile = "karma_rule" + index + ".dlog"; |
| 34 | karmaDataFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_data.ttl").toString(); | 34 | karmaDataFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_data.ttl").toString(); |
| 35 | karmaRuleFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_rule.dlog").toString(); | 35 | karmaRuleFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_rule.dlog").toString(); |
| 36 | 36 | ||
| 37 | reasoner = new MyKarma(); | 37 | reasoner = new MyKarma(); |
| 38 | } | 38 | } |
| 39 | 39 | ||
| 40 | public MyKarma getReasoner() { | 40 | public MyKarma getReasoner() { |
| 41 | return reasoner; | 41 | if(isDisposed()) throw new DisposedException(); |
| 42 | } | 42 | return reasoner; |
| 43 | 43 | } | |
| 44 | public void processOntology(OWLOntology elhoOntology) { | 44 | |
| 45 | try { | 45 | public void processOntology(OWLOntology elhoOntology) { |
| 46 | OntologyProcesser.transformOntology(elhoOntology, new File(karmaDataFile), new File(karmaRuleFile)); | 46 | if(isDisposed()) throw new DisposedException(); |
| 47 | } catch (IllegalInputOntologyException e) { | 47 | try { |
| 48 | e.printStackTrace(); | 48 | OntologyProcesser.transformOntology(elhoOntology, new File(karmaDataFile), new File(karmaRuleFile)); |
| 49 | } | 49 | } catch(IllegalInputOntologyException e) { |
| 50 | } | 50 | e.printStackTrace(); |
| 51 | 51 | } | |
| 52 | @Override | 52 | } |
| 53 | public void dispose() { | 53 | |
| 54 | reasoner.dispose(); | 54 | @Override |
| 55 | } | 55 | public void dispose() { |
| 56 | 56 | super.dispose(); | |
| 57 | @Override | 57 | reasoner.dispose(); |
| 58 | public AnswerTuples evaluate(String queryText) { | 58 | } |
| 59 | return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], null); | 59 | |
| 60 | } | 60 | @Override |
| 61 | 61 | public AnswerTuples evaluate(String queryText) { | |
| 62 | @Override | 62 | if(isDisposed()) throw new DisposedException(); |
| 63 | public AnswerTuples evaluate(String queryText, String[] answerVars) { | 63 | return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], null); |
| 64 | return evaluate(queryText, answerVars, null); | 64 | } |
| 65 | } | 65 | |
| 66 | 66 | @Override | |
| 67 | public AnswerTuples evaluate(String queryText, AnswerTuples soundAnswerTuples) { | 67 | public AnswerTuples evaluate(String queryText, String[] answerVars) { |
| 68 | return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], soundAnswerTuples); | 68 | if(isDisposed()) throw new DisposedException(); |
| 69 | } | 69 | return evaluate(queryText, answerVars, null); |
| 70 | 70 | } | |
| 71 | public AnswerTuples evaluate(String queryText, String[] answerVars, AnswerTuples soundAnswerTuples) { | 71 | |
| 72 | KarmaQuery karmaQuery = new KarmaQuery(queryText.replace("_:", "?")); | 72 | public AnswerTuples evaluate(String queryText, AnswerTuples soundAnswerTuples) { |
| 73 | reasoner.setConcurrence(false); | 73 | if(isDisposed()) throw new DisposedException(); |
| 74 | ConjunctiveQuery cq = karmaQuery.getConjunctiveQuery(); | 74 | return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], soundAnswerTuples); |
| 75 | if (cq == null) return null; | 75 | } |
| 76 | Set<AnswerTuple> answers = reasoner.answerCQ(cq, soundAnswerTuples, !queryText.contains("_:")); | 76 | |
| 77 | return new AnswerTuplesImp(answerVars, answers); | 77 | public AnswerTuples evaluate(String queryText, String[] answerVars, AnswerTuples soundAnswerTuples) { |
| 78 | } | 78 | if(isDisposed()) throw new DisposedException(); |
| 79 | 79 | KarmaQuery karmaQuery = new KarmaQuery(queryText.replace("_:", "?")); | |
| 80 | @Override | 80 | reasoner.setConcurrence(false); |
| 81 | public DataStore getDataStore() { | 81 | ConjunctiveQuery cq = karmaQuery.getConjunctiveQuery(); |
| 82 | return reasoner.getStore(); | 82 | if(cq == null) return null; |
| 83 | } | 83 | Set<AnswerTuple> answers = reasoner.answerCQ(cq, soundAnswerTuples, !queryText.contains("_:")); |
| 84 | 84 | return new AnswerTuplesImp(answerVars, answers); | |
| 85 | public void initialiseKarma() { | 85 | } |
| 86 | try { | 86 | |
| 87 | reasoner.initializeData(new File(karmaDataFile)); | 87 | @Override |
| 88 | reasoner.materialise(new File(karmaRuleFile)); | 88 | public DataStore getDataStore() { |
| 89 | 89 | if(isDisposed()) throw new DisposedException(); | |
| 90 | File tmp; | 90 | return reasoner.getStore(); |
| 91 | if (karmaDataFile != null && ((tmp = new File(karmaDataFile)).exists())) tmp.delete(); | 91 | } |
| 92 | if (karmaRuleFile != null && ((tmp = new File(karmaRuleFile)).exists())) tmp.delete(); | 92 | |
| 93 | } catch (FileNotFoundException e) { | 93 | public void initialiseKarma() { |
| 94 | e.printStackTrace(); | 94 | if(isDisposed()) throw new DisposedException(); |
| 95 | } catch (JRDFStoreException e) { | 95 | try { |
| 96 | e.printStackTrace(); | 96 | reasoner.initializeData(new File(karmaDataFile)); |
| 97 | } | 97 | reasoner.materialise(new File(karmaRuleFile)); |
| 98 | } | 98 | |
| 99 | File tmp; | ||
| 100 | if(karmaDataFile != null && ((tmp = new File(karmaDataFile)).exists())) tmp.delete(); | ||
| 101 | if(karmaRuleFile != null && ((tmp = new File(karmaRuleFile)).exists())) tmp.delete(); | ||
| 102 | } catch(FileNotFoundException e) { | ||
| 103 | e.printStackTrace(); | ||
| 104 | } catch(JRDFStoreException e) { | ||
| 105 | e.printStackTrace(); | ||
| 106 | } | ||
| 107 | } | ||
| 99 | 108 | ||
| 100 | } | 109 | } |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java index dd71809..1e8181f 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java | |||
| @@ -3,15 +3,14 @@ package uk.ac.ox.cs.pagoda.reasoner.light; | |||
| 3 | import org.semanticweb.HermiT.model.Constant; | 3 | import org.semanticweb.HermiT.model.Constant; |
| 4 | import org.semanticweb.HermiT.model.Individual; | 4 | import org.semanticweb.HermiT.model.Individual; |
| 5 | import org.semanticweb.HermiT.model.Term; | 5 | import org.semanticweb.HermiT.model.Term; |
| 6 | |||
| 7 | import uk.ac.ox.cs.pagoda.query.AnswerTuple; | ||
| 8 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 9 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 10 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | 6 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; |
| 11 | import uk.ac.ox.cs.JRDFox.model.GroundTerm; | 7 | import uk.ac.ox.cs.JRDFox.model.GroundTerm; |
| 12 | import uk.ac.ox.cs.JRDFox.store.TupleIterator; | 8 | import uk.ac.ox.cs.JRDFox.store.TupleIterator; |
| 9 | import uk.ac.ox.cs.pagoda.query.AnswerTuple; | ||
| 10 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 11 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 13 | 12 | ||
| 14 | public class RDFoxAnswerTuples implements AnswerTuples { | 13 | public class RDFoxAnswerTuples extends AnswerTuples { |
| 15 | 14 | ||
| 16 | long multi; | 15 | long multi; |
| 17 | TupleIterator m_iter; | 16 | TupleIterator m_iter; |
| @@ -22,7 +21,18 @@ public class RDFoxAnswerTuples implements AnswerTuples { | |||
| 22 | m_iter = iter; | 21 | m_iter = iter; |
| 23 | reset(); | 22 | reset(); |
| 24 | } | 23 | } |
| 25 | 24 | ||
| 25 | public static Term getHermitTerm(GroundTerm t) { | ||
| 26 | if(t instanceof uk.ac.ox.cs.JRDFox.model.Individual) { | ||
| 27 | uk.ac.ox.cs.JRDFox.model.Individual individual = (uk.ac.ox.cs.JRDFox.model.Individual) t; | ||
| 28 | return Individual.create(individual.getIRI()); | ||
| 29 | } | ||
| 30 | else { | ||
| 31 | uk.ac.ox.cs.JRDFox.model.Literal literal = ((uk.ac.ox.cs.JRDFox.model.Literal) t); | ||
| 32 | return Constant.create(literal.getLexicalForm(), literal.getDatatype().getIRI()); | ||
| 33 | } | ||
| 34 | } | ||
| 35 | |||
| 26 | @Override | 36 | @Override |
| 27 | public boolean isValid() { | 37 | public boolean isValid() { |
| 28 | return multi != 0; | 38 | return multi != 0; |
| @@ -34,7 +44,7 @@ public class RDFoxAnswerTuples implements AnswerTuples { | |||
| 34 | return m_iter.getArity(); | 44 | return m_iter.getArity(); |
| 35 | } catch (JRDFStoreException e) { | 45 | } catch (JRDFStoreException e) { |
| 36 | e.printStackTrace(); | 46 | e.printStackTrace(); |
| 37 | return -1; | 47 | return -1; |
| 38 | } | 48 | } |
| 39 | } | 49 | } |
| 40 | 50 | ||
| @@ -44,15 +54,12 @@ public class RDFoxAnswerTuples implements AnswerTuples { | |||
| 44 | multi = m_iter.getNext(); | 54 | multi = m_iter.getNext(); |
| 45 | } catch (JRDFStoreException e) { | 55 | } catch (JRDFStoreException e) { |
| 46 | e.printStackTrace(); | 56 | e.printStackTrace(); |
| 47 | } | 57 | } |
| 48 | } | 58 | } |
| 49 | 59 | ||
| 50 | @Override | 60 | @Override |
| 51 | public void dispose() { | 61 | public void dispose() { |
| 52 | m_iter.dispose(); | 62 | super.dispose(); |
| 53 | } | ||
| 54 | |||
| 55 | protected void finalize() { | ||
| 56 | m_iter.dispose(); | 63 | m_iter.dispose(); |
| 57 | } | 64 | } |
| 58 | 65 | ||
| @@ -85,16 +92,9 @@ public class RDFoxAnswerTuples implements AnswerTuples { | |||
| 85 | public String[] getAnswerVariables() { | 92 | public String[] getAnswerVariables() { |
| 86 | return m_answerVars; | 93 | return m_answerVars; |
| 87 | } | 94 | } |
| 88 | 95 | ||
| 89 | public static Term getHermitTerm(GroundTerm t) { | 96 | protected void finalize() { |
| 90 | if (t instanceof uk.ac.ox.cs.JRDFox.model.Individual) { | 97 | m_iter.dispose(); |
| 91 | uk.ac.ox.cs.JRDFox.model.Individual individual = (uk.ac.ox.cs.JRDFox.model.Individual) t; | ||
| 92 | return Individual.create(individual.getIRI()); | ||
| 93 | } | ||
| 94 | else { | ||
| 95 | uk.ac.ox.cs.JRDFox.model.Literal literal = ((uk.ac.ox.cs.JRDFox.model.Literal) t); | ||
| 96 | return Constant.create(literal.getLexicalForm(), literal.getDatatype().getIRI()); | ||
| 97 | } | ||
| 98 | } | 98 | } |
| 99 | 99 | ||
| 100 | } | 100 | } |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java index 61500f5..f835ba9 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java | |||
| @@ -13,119 +13,126 @@ import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; | |||
| 13 | import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter; | 13 | import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter; |
| 14 | import uk.ac.ox.cs.pagoda.util.Timer; | 14 | import uk.ac.ox.cs.pagoda.util.Timer; |
| 15 | import uk.ac.ox.cs.pagoda.util.Utility; | 15 | import uk.ac.ox.cs.pagoda.util.Utility; |
| 16 | import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; | ||
| 16 | 17 | ||
| 17 | import java.io.File; | 18 | import java.io.File; |
| 18 | import java.util.Collection; | 19 | import java.util.Collection; |
| 19 | 20 | ||
| 20 | public abstract class RDFoxQueryEngine implements QueryEngine { | 21 | public abstract class RDFoxQueryEngine extends QueryEngine { |
| 21 | |||
| 22 | public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2; | ||
| 23 | protected String name; | ||
| 24 | protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes(); | ||
| 25 | 22 | ||
| 26 | public RDFoxQueryEngine(String name) { | 23 | public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2; |
| 27 | this.name = name; | 24 | protected String name; |
| 28 | } | 25 | protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes(); |
| 29 | 26 | ||
| 30 | public static DataStore createDataStore() { | 27 | public RDFoxQueryEngine(String name) { |
| 31 | DataStore instance = null; | 28 | this.name = name; |
| 32 | try { | 29 | } |
| 30 | |||
| 31 | public static DataStore createDataStore() { | ||
| 32 | DataStore instance = null; | ||
| 33 | try { | ||
| 33 | // instance = new DataStore("par-head-n"); | 34 | // instance = new DataStore("par-head-n"); |
| 34 | instance = new DataStore(StoreType.NarrowParallelHead); | 35 | instance = new DataStore(StoreType.NarrowParallelHead); |
| 35 | instance.setNumberOfThreads(matNoOfThreads); | 36 | instance.setNumberOfThreads(matNoOfThreads); |
| 36 | instance.initialize(); | 37 | instance.initialize(); |
| 37 | } catch(JRDFStoreException e) { | 38 | } catch(JRDFStoreException e) { |
| 38 | e.printStackTrace(); | 39 | e.printStackTrace(); |
| 39 | } | 40 | } |
| 40 | return instance; | 41 | return instance; |
| 41 | } | 42 | } |
| 42 | 43 | ||
| 43 | public String getName() { | 44 | public String getName() { |
| 44 | return name; | 45 | if(isDisposed()) throw new DisposedException(); |
| 45 | } | 46 | return name; |
| 46 | 47 | } | |
| 47 | public abstract DataStore getDataStore(); | 48 | |
| 48 | 49 | public abstract DataStore getDataStore(); | |
| 49 | public abstract void dispose(); | 50 | |
| 50 | 51 | public void importRDFData(String fileName, String importedFile) { | |
| 51 | public void importRDFData(String fileName, String importedFile) { | 52 | if(isDisposed()) throw new DisposedException(); |
| 52 | if(importedFile == null || importedFile.isEmpty()) return; | 53 | if(importedFile == null || importedFile.isEmpty()) return; |
| 53 | Timer t = new Timer(); | 54 | Timer t = new Timer(); |
| 54 | DataStore store = getDataStore(); | 55 | DataStore store = getDataStore(); |
| 55 | try { | 56 | try { |
| 56 | long oldTripleCount = store.getTriplesCount(), tripleCount; | 57 | long oldTripleCount = store.getTriplesCount(), tripleCount; |
| 57 | for (String file: importedFile.split(QueryReasoner.ImportDataFileSeparator)) { | 58 | for(String file : importedFile.split(QueryReasoner.ImportDataFileSeparator)) { |
| 58 | store.importTurtleFile(new File(file), prefixes); | 59 | store.importTurtleFile(new File(file), prefixes); |
| 59 | } | 60 | } |
| 60 | tripleCount = store.getTriplesCount(); | 61 | tripleCount = store.getTriplesCount(); |
| 61 | Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); | 62 | Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); |
| 62 | store.clearRulesAndMakeFactsExplicit(); | 63 | store.clearRulesAndMakeFactsExplicit(); |
| 63 | } catch (JRDFStoreException e) { | 64 | } catch(JRDFStoreException e) { |
| 64 | e.printStackTrace(); | 65 | e.printStackTrace(); |
| 65 | } | 66 | } |
| 66 | Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds."); | 67 | Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds."); |
| 67 | } | 68 | } |
| 68 | 69 | ||
| 69 | public void importDataFromABoxOf(OWLOntology ontology) { | 70 | public void importDataFromABoxOf(OWLOntology ontology) { |
| 70 | DataStore store = getDataStore(); | 71 | if(isDisposed()) throw new DisposedException(); |
| 71 | try { | 72 | DataStore store = getDataStore(); |
| 72 | long prevTriplesCount = store.getTriplesCount(); | 73 | try { |
| 73 | store.importOntology(ontology.getOWLOntologyManager().createOntology(ontology.getABoxAxioms(true))); | 74 | long prevTriplesCount = store.getTriplesCount(); |
| 74 | long loadedTriples = store.getTriplesCount() - prevTriplesCount; | 75 | store.importOntology(ontology.getOWLOntologyManager().createOntology(ontology.getABoxAxioms(true))); |
| 75 | Utility.logInfo(name + ": loaded " + loadedTriples + " triples from " + ontology.getABoxAxioms(true) | 76 | long loadedTriples = store.getTriplesCount() - prevTriplesCount; |
| 76 | .size() + " ABox axioms"); | 77 | Utility.logInfo(name + ": loaded " + loadedTriples + " triples from " + ontology.getABoxAxioms(true) |
| 77 | } catch(JRDFStoreException | OWLOntologyCreationException e) { | 78 | .size() + " ABox axioms"); |
| 78 | e.printStackTrace(); | 79 | } catch(JRDFStoreException | OWLOntologyCreationException e) { |
| 79 | System.exit(1); | 80 | e.printStackTrace(); |
| 80 | } | 81 | System.exit(1); |
| 81 | 82 | } | |
| 82 | } | 83 | |
| 83 | 84 | } | |
| 84 | public void materialise(String programName, String programText) { | 85 | |
| 85 | if(programText == null) return; | 86 | public void materialise(String programName, String programText) { |
| 86 | Timer t = new Timer(); | 87 | if(isDisposed()) throw new DisposedException(); |
| 87 | DataStore store = getDataStore(); | 88 | if(programText == null) return; |
| 88 | try { | 89 | Timer t = new Timer(); |
| 89 | long oldTripleCount = store.getTriplesCount(), tripleCount; | 90 | DataStore store = getDataStore(); |
| 91 | try { | ||
| 92 | long oldTripleCount = store.getTriplesCount(), tripleCount; | ||
| 90 | // store.addRules(new String[] {programText}); | 93 | // store.addRules(new String[] {programText}); |
| 91 | store.importRules(programText); | 94 | store.importRules(programText); |
| 92 | store.applyReasoning(); | 95 | store.applyReasoning(); |
| 93 | tripleCount = store.getTriplesCount(); | 96 | tripleCount = store.getTriplesCount(); |
| 94 | Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); | 97 | Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); |
| 95 | store.clearRulesAndMakeFactsExplicit(); | 98 | store.clearRulesAndMakeFactsExplicit(); |
| 96 | } catch (JRDFStoreException e) { | 99 | } catch(JRDFStoreException e) { |
| 97 | e.printStackTrace(); | 100 | e.printStackTrace(); |
| 98 | } | 101 | } |
| 99 | Utility.logDebug(name + " store finished the materialisation of " + programName + " in " + t.duration() + " seconds."); | 102 | Utility.logDebug(name + " store finished the materialisation of " + programName + " in " + t.duration() + " seconds."); |
| 100 | } | 103 | } |
| 101 | 104 | ||
| 102 | @Override | 105 | @Override |
| 103 | public void evaluate(Collection<String> queryTexts, String answerFile) { | 106 | public void evaluate(Collection<String> queryTexts, String answerFile) { |
| 104 | if (queryTexts == null) | 107 | if(isDisposed()) throw new DisposedException(); |
| 105 | return ; | 108 | if(queryTexts == null) |
| 106 | 109 | return; | |
| 107 | int queryID = 0; | 110 | |
| 108 | AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile); | 111 | int queryID = 0; |
| 109 | AnswerTuples answerTuples; | 112 | AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile); |
| 110 | Timer t = new Timer(); | 113 | AnswerTuples answerTuples; |
| 111 | try { | 114 | Timer t = new Timer(); |
| 112 | for (String query: queryTexts) { | 115 | try { |
| 113 | t.reset(); | 116 | for(String query : queryTexts) { |
| 114 | answerTuples = null; | 117 | t.reset(); |
| 115 | try { | 118 | answerTuples = null; |
| 116 | answerTuples = evaluate(query); | 119 | try { |
| 117 | Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration()); | 120 | answerTuples = evaluate(query); |
| 118 | answerWriter.write(answerTuples.getAnswerVariables(), answerTuples); | 121 | Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration()); |
| 119 | } finally { | 122 | answerWriter.write(answerTuples.getAnswerVariables(), answerTuples); |
| 120 | if (answerTuples != null) answerTuples.dispose(); | 123 | } finally { |
| 121 | } | 124 | if(answerTuples != null) answerTuples.dispose(); |
| 122 | } | 125 | } |
| 123 | } finally { | 126 | } |
| 124 | answerWriter.close(); | 127 | } finally { |
| 125 | } | 128 | answerWriter.close(); |
| 126 | 129 | } | |
| 127 | Utility.logDebug("done computing query answers by RDFox."); | 130 | |
| 128 | 131 | Utility.logDebug("done computing query answers by RDFox."); | |
| 129 | } | 132 | } |
| 130 | 133 | ||
| 134 | @Override | ||
| 135 | public void dispose() { | ||
| 136 | super.dispose(); | ||
| 137 | } | ||
| 131 | } | 138 | } |
