diff options
Diffstat (limited to 'src/uk/ac/ox/cs/pagoda/reasoner')
20 files changed, 2949 insertions, 0 deletions
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java new file mode 100644 index 0000000..a222645 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java | |||
| @@ -0,0 +1,269 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | ||
| 2 | |||
| 3 | import java.util.LinkedList; | ||
| 4 | |||
| 5 | import org.semanticweb.HermiT.model.Atom; | ||
| 6 | import org.semanticweb.HermiT.model.AtomicConcept; | ||
| 7 | import org.semanticweb.HermiT.model.DLClause; | ||
| 8 | import org.semanticweb.HermiT.model.Variable; | ||
| 9 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 10 | import org.semanticweb.owlapi.model.OWLOntologyCreationException; | ||
| 11 | import org.semanticweb.owlapi.model.OWLOntologyManager; | ||
| 12 | |||
| 13 | import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; | ||
| 14 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 15 | import uk.ac.ox.cs.pagoda.query.QueryManager; | ||
| 16 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | ||
| 17 | import uk.ac.ox.cs.pagoda.reasoner.full.Checker; | ||
| 18 | import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; | ||
| 19 | import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter; | ||
| 20 | import uk.ac.ox.cs.pagoda.tracking.QueryTracker; | ||
| 21 | import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder; | ||
| 22 | import uk.ac.ox.cs.pagoda.util.Timer; | ||
| 23 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 24 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | ||
| 25 | import uk.ac.ox.cs.JRDFox.store.DataStore; | ||
| 26 | |||
/**
 * Decides whether the loaded ontology together with the imported data is
 * consistent (satisfiable). It does so by evaluating the "bottom" query
 * (instances of owl:Nothing) against the reasoner's lower- and upper-bound
 * stores; when the bounds disagree, it extracts the relevant "bottom
 * fragments" and runs a full HermiT-based check on each of them.
 *
 * NOTE(review): check() reads fullQueryRecord, which is only initialised in
 * checkRLLowerBound(); the call to that method inside check() is commented
 * out, so callers appear to be expected to invoke checkRLLowerBound() first —
 * verify against MyQueryReasoner.
 */
public class ConsistencyManager {

	protected MyQueryReasoner m_reasoner;
	protected QueryManager m_queryManager;

	// Started at construction; elapsed time is passed to
	// satisfiability()/unsatisfiability() when a verdict is reached.
	Timer t = new Timer();

	public ConsistencyManager(MyQueryReasoner reasoner) {
		m_reasoner = reasoner;
		m_queryManager = reasoner.getQueryManager();
	}

	// Record for the full bottom query (all individuals derived into owl:Nothing).
	// Initialised by checkRLLowerBound().
	QueryRecord fullQueryRecord;
	// One record per distinct bottom fragment; built by extractBottomFragment().
	QueryRecord[] botQueryRecords;

	// Bridging clauses of the form Nothing_i(X) -> Nothing_final{k}(X), mapping
	// each original bottom predicate to the representative of its
	// answer-equivalent group (filled in by extractBottomFragment()).
	LinkedList<DLClause> toAddClauses;

	/**
	 * Evaluates the bottom query in the RL lower-bound store. As a side effect
	 * this creates fullQueryRecord, which the other check methods rely on.
	 *
	 * @return false (unsatisfiable) if any sound answer to bottom exists;
	 *         true otherwise (consistency still undetermined).
	 */
	boolean checkRLLowerBound() {
		fullQueryRecord = m_queryManager.create(QueryRecord.botQueryText, 0);
		fullQueryRecord.updateLowerBoundAnswers(m_reasoner.rlLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()));
		if (fullQueryRecord.getNoOfSoundAnswers() > 0) {
			Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple());
			return unsatisfiability(t.duration());
		}
		return true;
	}

	/**
	 * Same as checkRLLowerBound() but against the EL lower-bound store.
	 * Requires fullQueryRecord to have been created already.
	 */
	boolean checkELLowerBound() {
		fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()));
		if (fullQueryRecord.getNoOfSoundAnswers() > 0) {
			Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple());
			return unsatisfiability(t.duration());
		}
		return true;
	}

	/**
	 * Checks the lazy upper-bound store (if one exists): if it derives no
	 * bottom tuples at all, the ontology is certainly satisfiable.
	 *
	 * @return true iff satisfiability was established; false means "not
	 *         decided here" (including when there is no lazy upper store).
	 */
	boolean checkLazyUpper() {
		if (m_reasoner.lazyUpperStore != null) {
			AnswerTuples tuples = null;
			try {
				tuples = m_reasoner.lazyUpperStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables());
				if (!tuples.isValid()) {
					Utility.logInfo("There are no contradictions derived in the lazy upper bound materialisation.");
					return satisfiability(t.duration());
				}
			}
			finally {
				if (tuples != null) tuples.dispose();
			}
		}
		return false;
	}

	/**
	 * Full consistency check: evaluates bottom in the tracking (upper-bound)
	 * store; if the upper bound has no complete answers the data is
	 * satisfiable. Otherwise extracts the bottom fragments and runs a
	 * summarised HermiT check on each one.
	 *
	 * @return true iff the ontology plus data is satisfiable.
	 */
	boolean check() {
//		if (!checkRLLowerBound()) return false;
//		if (!checkELLowerBound()) return false;
//		if (checkLazyUpper()) return true;

		fullQueryRecord.updateUpperBoundAnswers(m_reasoner.trackingStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()));
		if (fullQueryRecord.getNoOfCompleteAnswers() == 0)
			return satisfiability(t.duration());

		extractBottomFragment();

		try {
			extractAxioms4Full();
		} catch (OWLOntologyCreationException e) {
			// NOTE(review): failure here is swallowed; fullQueryRecord keeps
			// whatever relevant ontology it had — confirm this is intended.
			e.printStackTrace();
		}
		fullQueryRecord.saveRelevantClause();

		boolean satisfiability;

		// Check each bottom fragment independently; any inconsistent fragment
		// makes the whole ontology unsatisfiable.
		Checker checker;
		for (QueryRecord r: getQueryRecords()) {
			// TODO to be removed ...
//			r.saveRelevantOntology("bottom" + r.getQueryID() + ".owl");
			checker = new HermitSummaryFilter(r); // m_reasoner.factory.getSummarisedReasoner(r);
			satisfiability = checker.isConsistent();
			checker.dispose();
			if (!satisfiability) return unsatisfiability(t.duration());
		}

//		Checker checker = m_reasoner.factory.getSummarisedReasoner(fullQueryRecord);
//		boolean satisfiable = checker.isConsistent();
//		checker.dispose();
//		if (!satisfiable) return unsatisfiability(t.duration());

		return satisfiability(t.duration());
	}

	/**
	 * Records the "unsatisfiable" verdict and releases the full query record.
	 * NOTE(review): the duration parameter is currently unused.
	 */
	protected boolean unsatisfiability(double duration) {
		fullQueryRecord.dispose();
		Utility.logDebug("The ontology and dataset is unsatisfiable.");
		return false;
	}

	/**
	 * Records the "satisfiable" verdict and releases the full query record.
	 * NOTE(review): the duration parameter is currently unused.
	 */
	protected boolean satisfiability(double duration) {
		fullQueryRecord.dispose();
		Utility.logDebug("The ontology and dataset is satisfiable.");
		return true;
	}

	// Guard so the (expensive) fragment extraction runs at most once.
	boolean fragmentExtracted = false;

	/**
	 * Splits the bottom query into one query per bottom predicate
	 * ("Nothing1", "Nothing2", ...), groups predicates that have identical gap
	 * answers, renames each group representative to "Nothing_final{k}", adds
	 * bridging clauses for the non-representatives, and finally materialises
	 * the per-fragment tracking programs in the tracking store so the relevant
	 * axioms can be extracted.
	 */
	public void extractBottomFragment() {
		if (fragmentExtracted) return ;
		fragmentExtracted = true;

		UpperDatalogProgram upperProgram = m_reasoner.program.getUpper();
		int number = upperProgram.getBottomNumber();

		if (number <= 1) {
			// Only one bottom predicate: the full record is the single fragment.
			botQueryRecords = new QueryRecord[] { fullQueryRecord };
		}
		else {
			// Evaluate each individual bottom predicate in the tracking store.
			QueryRecord[] tempQueryRecords = new QueryRecord[number - 1];
			QueryRecord record;
			for (int i = 0; i < number - 1; ++i) {
				tempQueryRecords[i] = record = m_queryManager.create(QueryRecord.botQueryText.replace("Nothing", "Nothing" + (i + 1)), 0, i + 1);
				record.updateUpperBoundAnswers(m_reasoner.trackingStore.evaluate(record.getQueryText(), record.getAnswerVariables()));
			}

			// Union-find-like grouping: group[j] = i means record j has the same
			// gap answers as representative i (processed records are dropped).
			int bottomNumber = 0;
			int[] group = new int[number - 1];
			for (int i = 0; i < number - 1; ++i) group[i] = i;
			for (int i = 0; i < number - 1; ++i)
				if (tempQueryRecords[i].processed()) tempQueryRecords[i].dispose();
				else if (group[i] == i) {
					++bottomNumber;
					record = tempQueryRecords[i];
					for (int j = i + 1; j < number - 1; ++j)
						if (record.hasSameGapAnswers(tempQueryRecords[j]))
							group[j] = i;
				}

			Utility.logInfo("There are " + bottomNumber + " different bottom fragments.");
			toAddClauses = new LinkedList<DLClause>();
			int bottomCounter = 0;
			botQueryRecords = new QueryRecord[bottomNumber];
			Variable X = Variable.create("X");
			// NOTE(review): for representatives, group[i] is overwritten with the
			// 1-based final index (++bottomCounter); followers then read it via
			// group[group[i]]. This mixed 0-/1-based indexing is deliberate but
			// fragile — verify before touching.
			for (int i = 0; i < number - 1; ++i)
				if (!tempQueryRecords[i].processed())
					if (group[i] == i) {
						// Representative: keep the record under its final name and
						// add the clause Nothing{i+1}(X) -> Nothing_final{k}(X).
						botQueryRecords[bottomCounter] = record = tempQueryRecords[i];
						record.resetInfo(QueryRecord.botQueryText.replace("Nothing", "Nothing_final" + (++bottomCounter)), 0, group[i] = bottomCounter);
						toAddClauses.add(
								DLClause.create(
										new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + bottomCounter), X)},
										new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)}));
					}
					else {
						// Follower: bridge to its representative's final predicate
						// and release the duplicate record.
						toAddClauses.add(
								DLClause.create(
										new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + group[group[i]]), X)},
										new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)}));
						tempQueryRecords[i].dispose();
					}

			upperProgram.updateDependencyGraph(toAddClauses);
		}

		String[] programs = collectTrackingProgramAndImport();
		if (programs.length == 0)
			return ;

		// Materialise the per-fragment tracking programs in the tracking store,
		// then extract the relevant axioms and restore the store's rule set.
		DataStore store = m_reasoner.trackingStore.getDataStore();
		long oldTripleCount, tripleCount;
		try {
			Timer t1 = new Timer();
			oldTripleCount = store.getTriplesCount();
			for (String program: programs)
				store.importRules(program);
			store.applyReasoning(true);
			tripleCount = store.getTriplesCount();

			Utility.logInfo("tracking store after materialising tracking program: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)",
					"tracking store finished the materialisation of tracking program in " + t1.duration() + " seconds.");

			extractAxioms();
			store.clearRulesAndMakeFactsExplicit();
		} catch (JRDFStoreException e) {
			e.printStackTrace();
		} catch (OWLOntologyCreationException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Merges the relevant clauses and relevant ontologies of all bottom
	 * fragments into fullQueryRecord, so the full record can be checked (or
	 * saved) as a single unit.
	 */
	private void extractAxioms4Full() throws OWLOntologyCreationException {
		OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager();
		OWLOntology fullOntology = manager.createOntology();
		for (QueryRecord record: botQueryRecords) {
			for (DLClause clause: record.getRelevantClauses()) {
				fullQueryRecord.addRelevantClauses(clause);
			}
			manager.addAxioms(fullOntology, record.getRelevantOntology().getAxioms());
		}
		fullQueryRecord.setRelevantOntology(fullOntology);
	}

	/**
	 * For each bottom fragment, runs the query tracker against the tracking
	 * store to populate the record's relevant ontology with the axioms that
	 * contribute to its bottom answers.
	 */
	private void extractAxioms() throws OWLOntologyCreationException {
		OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager();
		for (QueryRecord record: botQueryRecords) {
			record.setRelevantOntology(manager.createOntology());
			QueryTracker tracker = new QueryTracker(m_reasoner.encoder, m_reasoner.rlLowerStore, record);
			m_reasoner.encoder.setCurrentQuery(record);
			tracker.extractAxioms(m_reasoner.trackingStore);
//			record.saveRelevantClause();
//			record.saveRelevantOntology("bottom" + record.getQueryID() + ".owl");
			Utility.logInfo("finish extracting axioms for bottom " + record.getQueryID());
		}
	}

	/**
	 * Builds one tracking-program text per bottom fragment: the encoder's
	 * tracking program plus the bridging clauses whose head mentions that
	 * fragment's "_final{i+1}" predicate.
	 *
	 * NOTE(review): the contains("_final" + (i + 1)) match is textual — with
	 * ten or more fragments, "_final1" would also match "_final10"; confirm
	 * the fragment count stays below that in practice.
	 */
	private String[] collectTrackingProgramAndImport() {
		String[] programs = new String[botQueryRecords.length];
		TrackingRuleEncoder encoder = m_reasoner.encoder;

		StringBuilder builder;
		LinkedList<DLClause> currentClauses = new LinkedList<DLClause>();

		for (int i = 0; i < botQueryRecords.length; ++i) {
			encoder.setCurrentQuery(botQueryRecords[i]);
			builder = new StringBuilder(encoder.getTrackingProgram());
//			encoder.saveTrackingRules("tracking_bottom" + (i + 1) + ".dlog");

			for (DLClause clause: toAddClauses)
				if (clause.getHeadAtom(0).getDLPredicate().toString().contains("_final" + (i + 1)))
					currentClauses.add(clause);

			builder.append(DLClauseHelper.toString(currentClauses));
			programs[i] = builder.toString();

			currentClauses.clear();
		}

		return programs;
	}

	/** Returns the bottom-fragment records computed by extractBottomFragment(). */
	public QueryRecord[] getQueryRecords() {
		return botQueryRecords;
	}

}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java new file mode 100644 index 0000000..67dc4fc --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java | |||
| @@ -0,0 +1,70 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | ||
| 2 | |||
| 3 | import org.semanticweb.owlapi.model.OWLOntologyCreationException; | ||
| 4 | import org.semanticweb.owlapi.model.OWLOntologyManager; | ||
| 5 | |||
| 6 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | ||
| 7 | import uk.ac.ox.cs.pagoda.reasoner.full.Checker; | ||
| 8 | import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter; | ||
| 9 | import uk.ac.ox.cs.pagoda.tracking.QueryTracker; | ||
| 10 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 11 | |||
| 12 | @Deprecated | ||
| 13 | public class ConsistencyManager2 extends ConsistencyManager { | ||
| 14 | |||
| 15 | public ConsistencyManager2(MyQueryReasoner reasoner) { | ||
| 16 | super(reasoner); | ||
| 17 | fragmentExtracted = true; | ||
| 18 | } | ||
| 19 | |||
| 20 | protected boolean unsatisfiability(double duration) { | ||
| 21 | Utility.logDebug("The ontology and dataset is unsatisfiable."); | ||
| 22 | return false; | ||
| 23 | } | ||
| 24 | |||
| 25 | protected boolean satisfiability(double duration) { | ||
| 26 | Utility.logDebug("The ontology and dataset is satisfiable."); | ||
| 27 | return true; | ||
| 28 | } | ||
| 29 | |||
| 30 | @Override | ||
| 31 | boolean check() { | ||
| 32 | // if (!checkRLLowerBound()) return false; | ||
| 33 | // if (!checkELLowerBound()) return false; | ||
| 34 | if (checkLazyUpper()) return true; | ||
| 35 | |||
| 36 | fullQueryRecord.updateUpperBoundAnswers(m_reasoner.trackingStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables())); | ||
| 37 | if (fullQueryRecord.getNoOfCompleteAnswers() == 0) | ||
| 38 | return satisfiability(t.duration()); | ||
| 39 | |||
| 40 | try { | ||
| 41 | extractAxioms(); | ||
| 42 | } catch (OWLOntologyCreationException e) { | ||
| 43 | e.printStackTrace(); | ||
| 44 | } | ||
| 45 | |||
| 46 | Checker checker = new HermitSummaryFilter(fullQueryRecord); // m_reasoner.factory.getSummarisedReasoner(fullQueryRecord); | ||
| 47 | // fullQueryRecord.saveRelevantOntology("fragment_bottom.owl"); | ||
| 48 | boolean satisfiable = checker.isConsistent(); | ||
| 49 | checker.dispose(); | ||
| 50 | if (!satisfiable) return unsatisfiability(t.duration()); | ||
| 51 | |||
| 52 | return satisfiability(t.duration()); | ||
| 53 | } | ||
| 54 | |||
| 55 | private void extractAxioms() throws OWLOntologyCreationException { | ||
| 56 | OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager(); | ||
| 57 | fullQueryRecord.setRelevantOntology(manager.createOntology()); | ||
| 58 | QueryTracker tracker = new QueryTracker(m_reasoner.encoder, m_reasoner.rlLowerStore, fullQueryRecord); | ||
| 59 | m_reasoner.encoder.setCurrentQuery(fullQueryRecord); | ||
| 60 | tracker.extract(m_reasoner.trackingStore, null, true); | ||
| 61 | } | ||
| 62 | |||
| 63 | @Override | ||
| 64 | public QueryRecord[] getQueryRecords() { | ||
| 65 | if (botQueryRecords == null) | ||
| 66 | botQueryRecords = new QueryRecord[] {fullQueryRecord}; | ||
| 67 | return botQueryRecords; | ||
| 68 | } | ||
| 69 | |||
| 70 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java new file mode 100644 index 0000000..4ebe5f2 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java | |||
| @@ -0,0 +1,96 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | ||
| 2 | |||
| 3 | import org.semanticweb.karma2.profile.ELHOProfile; | ||
| 4 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 5 | |||
| 6 | import uk.ac.ox.cs.pagoda.constraints.UnaryBottom; | ||
| 7 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 8 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | ||
| 9 | import uk.ac.ox.cs.pagoda.query.QueryRecord.Step; | ||
| 10 | import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine; | ||
| 11 | import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; | ||
| 12 | import uk.ac.ox.cs.pagoda.util.Timer; | ||
| 13 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 14 | |||
| 15 | public class ELHOQueryReasoner extends QueryReasoner { | ||
| 16 | |||
| 17 | LowerDatalogProgram program; | ||
| 18 | |||
| 19 | OWLOntology elho_ontology; | ||
| 20 | KarmaQueryEngine elLowerStore = null; | ||
| 21 | |||
| 22 | private Timer t = new Timer(); | ||
| 23 | |||
| 24 | public ELHOQueryReasoner() { | ||
| 25 | elLowerStore = new KarmaQueryEngine("el"); | ||
| 26 | } | ||
| 27 | |||
| 28 | @Override | ||
| 29 | public void evaluate(QueryRecord queryRecord) { | ||
| 30 | AnswerTuples elAnswer = null; | ||
| 31 | t.reset(); | ||
| 32 | try { | ||
| 33 | elAnswer = elLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 34 | queryRecord.updateLowerBoundAnswers(elAnswer); | ||
| 35 | } finally { | ||
| 36 | if (elAnswer != null) elAnswer.dispose(); | ||
| 37 | } | ||
| 38 | queryRecord.addProcessingTime(Step.ELLowerBound, t.duration()); | ||
| 39 | |||
| 40 | queryRecord.setDifficulty(Step.ELLowerBound); | ||
| 41 | queryRecord.markAsProcessed(); | ||
| 42 | } | ||
| 43 | |||
| 44 | @Override | ||
| 45 | public void evaluateUpper(QueryRecord queryRecord) { | ||
| 46 | evaluate(queryRecord); | ||
| 47 | } | ||
| 48 | |||
| 49 | @Override | ||
| 50 | public void dispose() { | ||
| 51 | if (elLowerStore != null) elLowerStore.dispose(); | ||
| 52 | super.dispose(); | ||
| 53 | } | ||
| 54 | |||
| 55 | @Override | ||
| 56 | public void loadOntology(OWLOntology ontology) { | ||
| 57 | program = new LowerDatalogProgram(!forSemFacet); | ||
| 58 | program.load(ontology, new UnaryBottom()); | ||
| 59 | program.transform(); | ||
| 60 | |||
| 61 | importData(program.getAdditionalDataFile()); | ||
| 62 | |||
| 63 | elho_ontology = new ELHOProfile().getFragment(ontology); | ||
| 64 | elLowerStore.processOntology(elho_ontology); | ||
| 65 | } | ||
| 66 | |||
| 67 | @Override | ||
| 68 | public boolean preprocess() { | ||
| 69 | elLowerStore.importRDFData("data", importedData.toString()); | ||
| 70 | String rlLowerProgramText = program.toString(); | ||
| 71 | // program.save(); | ||
| 72 | elLowerStore.materialise("lower program", rlLowerProgramText); | ||
| 73 | elLowerStore.initialiseKarma(); | ||
| 74 | |||
| 75 | if (!isConsistent()) { | ||
| 76 | Utility.logDebug("The dataset is not consistent with the ontology."); | ||
| 77 | return false; | ||
| 78 | } | ||
| 79 | return true; | ||
| 80 | } | ||
| 81 | |||
| 82 | @Override | ||
| 83 | public boolean isConsistent() { | ||
| 84 | String[] X = new String[] {"X"}; | ||
| 85 | AnswerTuples ans = null; | ||
| 86 | try { | ||
| 87 | ans = elLowerStore.evaluate(QueryRecord.botQueryText, X); | ||
| 88 | if (ans.isValid()) return false; | ||
| 89 | } finally { | ||
| 90 | if (ans != null) ans.dispose(); | ||
| 91 | } | ||
| 92 | |||
| 93 | return true; | ||
| 94 | } | ||
| 95 | |||
| 96 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java new file mode 100644 index 0000000..62d238b --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java | |||
| @@ -0,0 +1,195 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | ||
| 2 | |||
| 3 | import org.semanticweb.karma2.profile.ELHOProfile; | ||
| 4 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 5 | |||
| 6 | import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; | ||
| 7 | import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator; | ||
| 8 | import uk.ac.ox.cs.pagoda.owl.OWLHelper; | ||
| 9 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 10 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | ||
| 11 | import uk.ac.ox.cs.pagoda.query.QueryRecord.Step; | ||
| 12 | import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; | ||
| 13 | import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine; | ||
| 14 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; | ||
| 15 | import uk.ac.ox.cs.pagoda.util.Timer; | ||
| 16 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 17 | |||
| 18 | public class ELHOUQueryReasoner extends QueryReasoner { | ||
| 19 | |||
| 20 | DatalogProgram program; | ||
| 21 | |||
| 22 | BasicQueryEngine rlLowerStore; | ||
| 23 | BasicQueryEngine rlUpperStore; | ||
| 24 | |||
| 25 | OWLOntology elho_ontology; | ||
| 26 | KarmaQueryEngine elLowerStore = null; | ||
| 27 | |||
| 28 | boolean multiStageTag, equalityTag; | ||
| 29 | |||
| 30 | public ELHOUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { | ||
| 31 | this.multiStageTag = multiStageTag; | ||
| 32 | this.equalityTag = considerEqualities; | ||
| 33 | rlLowerStore = new BasicQueryEngine("rl-lower-bound"); | ||
| 34 | elLowerStore = new KarmaQueryEngine("el-lower-bound"); | ||
| 35 | |||
| 36 | if (!multiStageTag) | ||
| 37 | rlUpperStore = new BasicQueryEngine("rl-upper-bound"); | ||
| 38 | else | ||
| 39 | rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); | ||
| 40 | } | ||
| 41 | |||
| 42 | private Timer t = new Timer(); | ||
| 43 | |||
| 44 | @Override | ||
| 45 | public void evaluate(QueryRecord queryRecord) { | ||
| 46 | AnswerTuples rlAnswer = null; | ||
| 47 | t.reset(); | ||
| 48 | try { | ||
| 49 | rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 50 | queryRecord.updateLowerBoundAnswers(rlAnswer); | ||
| 51 | } finally { | ||
| 52 | if (rlAnswer != null) rlAnswer.dispose(); | ||
| 53 | } | ||
| 54 | queryRecord.addProcessingTime(Step.LowerBound, t.duration()); | ||
| 55 | |||
| 56 | String extendedQueryText = queryRecord.getExtendedQueryText()[0]; | ||
| 57 | String[] toQuery = queryRecord.getQueryText().equals(extendedQueryText) ? | ||
| 58 | new String[] {queryRecord.getQueryText()} : | ||
| 59 | new String[] {queryRecord.getQueryText(), extendedQueryText}; | ||
| 60 | |||
| 61 | for (String queryText: toQuery) { | ||
| 62 | rlAnswer = null; | ||
| 63 | t.reset(); | ||
| 64 | try { | ||
| 65 | rlAnswer = rlUpperStore.evaluate(queryText, queryRecord.getAnswerVariables()); | ||
| 66 | queryRecord.updateUpperBoundAnswers(rlAnswer); | ||
| 67 | } finally { | ||
| 68 | if (rlAnswer != null) rlAnswer.dispose(); | ||
| 69 | } | ||
| 70 | queryRecord.addProcessingTime(Step.UpperBound, t.duration()); | ||
| 71 | |||
| 72 | if (queryRecord.processed()) { | ||
| 73 | queryRecord.setDifficulty(Step.UpperBound); | ||
| 74 | return ; | ||
| 75 | } | ||
| 76 | } | ||
| 77 | |||
| 78 | AnswerTuples elAnswer = null; | ||
| 79 | t.reset(); | ||
| 80 | try { | ||
| 81 | elAnswer = elLowerStore.evaluate(extendedQueryText, queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers()); | ||
| 82 | queryRecord.updateLowerBoundAnswers(elAnswer); | ||
| 83 | } finally { | ||
| 84 | if (elAnswer != null) elAnswer.dispose(); | ||
| 85 | } | ||
| 86 | queryRecord.addProcessingTime(Step.ELLowerBound, t.duration()); | ||
| 87 | } | ||
| 88 | |||
| 89 | @Override | ||
| 90 | public void evaluateUpper(QueryRecord queryRecord) { | ||
| 91 | AnswerTuples rlAnswer = null; | ||
| 92 | try { | ||
| 93 | rlAnswer = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 94 | queryRecord.updateUpperBoundAnswers(rlAnswer, true); | ||
| 95 | } finally { | ||
| 96 | if (rlAnswer != null) rlAnswer.dispose(); | ||
| 97 | } | ||
| 98 | } | ||
| 99 | |||
| 100 | @Override | ||
| 101 | public void dispose() { | ||
| 102 | if (elLowerStore != null) elLowerStore.dispose(); | ||
| 103 | if (rlUpperStore != null) rlUpperStore.dispose(); | ||
| 104 | super.dispose(); | ||
| 105 | } | ||
| 106 | |||
| 107 | @Override | ||
| 108 | public void loadOntology(OWLOntology o) { | ||
| 109 | if (!equalityTag) { | ||
| 110 | EqualitiesEliminator eliminator = new EqualitiesEliminator(o); | ||
| 111 | o = eliminator.getOutputOntology(); | ||
| 112 | eliminator.save(); | ||
| 113 | } | ||
| 114 | |||
| 115 | OWLOntology ontology = o; | ||
| 116 | program = new DatalogProgram(ontology, !forSemFacet); | ||
| 117 | |||
| 118 | importData(program.getAdditionalDataFile()); | ||
| 119 | |||
| 120 | elho_ontology = new ELHOProfile().getFragment(ontology); | ||
| 121 | elLowerStore.processOntology(elho_ontology); | ||
| 122 | originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); | ||
| 123 | } | ||
| 124 | |||
| 125 | String originalMarkProgram; | ||
| 126 | |||
| 127 | @Override | ||
| 128 | public boolean preprocess() { | ||
| 129 | String name = "data", datafile = importedData.toString(); | ||
| 130 | |||
| 131 | String lowername = "lower program"; | ||
| 132 | String rlLowerProgramText = program.getLower().toString(); | ||
| 133 | |||
| 134 | rlUpperStore.importRDFData(name, datafile); | ||
| 135 | rlUpperStore.materialise("saturate named individuals", originalMarkProgram); | ||
| 136 | |||
| 137 | int flag = rlUpperStore.materialiseRestrictedly(program, null); | ||
| 138 | if (flag != 1) { | ||
| 139 | if (flag == -1) return false; | ||
| 140 | rlUpperStore.dispose(); | ||
| 141 | |||
| 142 | if (!multiStageTag) | ||
| 143 | rlUpperStore = new BasicQueryEngine("rl-upper-bound"); | ||
| 144 | else | ||
| 145 | rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); | ||
| 146 | rlUpperStore.importRDFData(name, datafile); | ||
| 147 | rlUpperStore.materialise("saturate named individuals", originalMarkProgram); | ||
| 148 | rlUpperStore.materialiseFoldedly(program, null); | ||
| 149 | } | ||
| 150 | Utility.logInfo("upper store ready."); | ||
| 151 | |||
| 152 | rlLowerStore.importRDFData(name, datafile); | ||
| 153 | rlLowerStore.materialise(lowername, rlLowerProgramText); | ||
| 154 | Utility.logInfo("lower store ready."); | ||
| 155 | |||
| 156 | elLowerStore.importRDFData(name, datafile); | ||
| 157 | elLowerStore.materialise("saturate named individuals", originalMarkProgram); | ||
| 158 | elLowerStore.materialise(lowername, rlLowerProgramText); | ||
| 159 | |||
| 160 | elLowerStore.initialiseKarma(); | ||
| 161 | Utility.logInfo("EL lower store ready."); | ||
| 162 | |||
| 163 | if (!isConsistent()) { | ||
| 164 | Utility.logInfo("The dataset is not consistent with the ontology."); | ||
| 165 | return false; | ||
| 166 | } | ||
| 167 | Utility.logInfo("The dataset is consistent."); | ||
| 168 | return true; | ||
| 169 | } | ||
| 170 | |||
| 171 | @Override | ||
| 172 | public boolean isConsistent() { | ||
| 173 | Utility.logInfo("Start checking consistency... "); | ||
| 174 | String[] X = new String[] {"X"}; | ||
| 175 | AnswerTuples ans = null; | ||
| 176 | try { | ||
| 177 | ans = rlUpperStore.evaluate(QueryRecord.botQueryText, X); | ||
| 178 | if (!ans.isValid()) return true; | ||
| 179 | } finally { | ||
| 180 | if (ans != null) ans.dispose(); | ||
| 181 | } | ||
| 182 | |||
| 183 | ans = null; | ||
| 184 | try { | ||
| 185 | ans = elLowerStore.evaluate(QueryRecord.botQueryText, X); | ||
| 186 | if (ans.isValid()) return false; | ||
| 187 | } finally { | ||
| 188 | if (ans != null) ans.dispose(); | ||
| 189 | } | ||
| 190 | |||
| 191 | Utility.logDebug("The consistency of the data has not been determined yet."); | ||
| 192 | return true; | ||
| 193 | } | ||
| 194 | |||
| 195 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java new file mode 100644 index 0000000..5511691 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java | |||
| @@ -0,0 +1,133 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | ||
| 2 | |||
| 3 | import java.io.File; | ||
| 4 | import java.io.IOException; | ||
| 5 | import java.util.HashSet; | ||
| 6 | import java.util.Set; | ||
| 7 | |||
| 8 | import org.semanticweb.HermiT.Reasoner; | ||
| 9 | import org.semanticweb.owlapi.model.OWLClassExpression; | ||
| 10 | import org.semanticweb.owlapi.model.OWLDataFactory; | ||
| 11 | import org.semanticweb.owlapi.model.OWLNamedIndividual; | ||
| 12 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 13 | import org.semanticweb.owlapi.model.OWLOntologyCreationException; | ||
| 14 | import org.semanticweb.owlapi.model.OWLOntologyStorageException; | ||
| 15 | |||
| 16 | import uk.ac.ox.cs.JRDFox.model.Individual; | ||
| 17 | import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; | ||
| 18 | import uk.ac.ox.cs.pagoda.owl.OWLHelper; | ||
| 19 | import uk.ac.ox.cs.pagoda.owl.QueryRoller; | ||
| 20 | import uk.ac.ox.cs.pagoda.query.AnswerTuple; | ||
| 21 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 22 | import uk.ac.ox.cs.pagoda.query.AnswerTuplesImp; | ||
| 23 | import uk.ac.ox.cs.pagoda.query.GapByStore4ID; | ||
| 24 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | ||
| 25 | import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; | ||
| 26 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; | ||
| 27 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 28 | |||
| 29 | public class HermiTReasoner extends QueryReasoner { | ||
| 30 | |||
| 31 | Reasoner hermit; | ||
| 32 | |||
| 33 | BasicQueryEngine upperStore = null; | ||
| 34 | |||
| 35 | OWLOntology onto; | ||
| 36 | OWLDataFactory factory; | ||
| 37 | |||
| 38 | String importedOntologyPath = null; | ||
| 39 | |||
| 40 | QueryRoller roller; | ||
| 41 | boolean toCheckSatisfiability; | ||
| 42 | |||
| 43 | public HermiTReasoner(boolean toCheckSatisfiability) { | ||
| 44 | this.toCheckSatisfiability = toCheckSatisfiability; | ||
| 45 | } | ||
| 46 | |||
| 47 | @Override | ||
| 48 | public void loadOntology(OWLOntology ontology) { | ||
| 49 | onto = ontology; | ||
| 50 | } | ||
| 51 | |||
| 52 | @Override | ||
| 53 | public boolean preprocess() { | ||
| 54 | OWLOntology tbox = onto; | ||
| 55 | try { | ||
| 56 | onto = OWLHelper.getImportedOntology(tbox, importedData.toString().split(ImportDataFileSeparator)); | ||
| 57 | importedOntologyPath = OWLHelper.getOntologyPath(onto); | ||
| 58 | } catch (OWLOntologyCreationException e) { | ||
| 59 | // TODO Auto-generated catch block | ||
| 60 | e.printStackTrace(); | ||
| 61 | } catch (OWLOntologyStorageException e) { | ||
| 62 | // TODO Auto-generated catch block | ||
| 63 | e.printStackTrace(); | ||
| 64 | } catch (IOException e) { | ||
| 65 | // TODO Auto-generated catch block | ||
| 66 | e.printStackTrace(); | ||
| 67 | } | ||
| 68 | |||
| 69 | DatalogProgram datalogProgram = new DatalogProgram(tbox, false); | ||
| 70 | importData(datalogProgram.getAdditionalDataFile()); | ||
| 71 | upperStore = new MultiStageQueryEngine("rl-upper", false); | ||
| 72 | upperStore.importRDFData("data", importedData.toString()); | ||
| 73 | GapByStore4ID gap = new GapByStore4ID(upperStore); | ||
| 74 | upperStore.materialiseFoldedly(datalogProgram, gap); | ||
| 75 | gap.clear(); | ||
| 76 | |||
| 77 | factory = onto.getOWLOntologyManager().getOWLDataFactory(); | ||
| 78 | roller = new QueryRoller(factory); | ||
| 79 | |||
| 80 | hermit = new Reasoner(onto); | ||
| 81 | return isConsistent(); | ||
| 82 | } | ||
| 83 | |||
| 84 | @Override | ||
| 85 | public boolean isConsistent() { | ||
| 86 | if (toCheckSatisfiability) | ||
| 87 | return hermit.isConsistent(); | ||
| 88 | return true; | ||
| 89 | } | ||
| 90 | |||
	@Override
	public void evaluate(QueryRecord record) {
		String[] disVars = record.getDistinguishedVariables();
		// All named individuals, including those from imported ontologies.
		Set<OWLNamedIndividual> individuals = onto.getIndividualsInSignature(true);
		if (disVars.length == 1) {
			// Roll the query up into a single class expression and test, for
			// each individual, whether the corresponding class assertion is
			// entailed by the ontology.
			OWLClassExpression clsExp = roller.rollUp(record.getClause(), record.getAnswerVariables()[0]);
			Set<AnswerTuple> answers = new HashSet<AnswerTuple>();
			for (OWLNamedIndividual individual: individuals) {
				Utility.logDebug("checking ... " + individual);
				if (hermit.isEntailed(factory.getOWLClassAssertionAxiom(clsExp, individual))) {
					answers.add(new AnswerTuple(new Individual[] {Individual.create(individual.toStringID())}));
				}
			}
			// HermiT entailment checks are sound and complete, so these
			// answers are exact and the record is fully answered.
			record.updateLowerBoundAnswers(new AnswerTuplesImp(record.getAnswerVariables(), answers));
			record.markAsProcessed();
		}
		else {
			// FIXME join here: queries with more than one distinguished
			// variable are not supported yet; they are marked processed
			// without adding any answers.
			record.markAsProcessed();
		}
	}
| 112 | |||
| 113 | @Override | ||
| 114 | public void evaluateUpper(QueryRecord record) { | ||
| 115 | AnswerTuples rlAnswer = null; | ||
| 116 | try { | ||
| 117 | rlAnswer = upperStore.evaluate(record.getQueryText(), record.getAnswerVariables()); | ||
| 118 | record.updateUpperBoundAnswers(rlAnswer, true); | ||
| 119 | } finally { | ||
| 120 | if (rlAnswer != null) rlAnswer.dispose(); | ||
| 121 | } | ||
| 122 | } | ||
| 123 | |||
| 124 | @Override | ||
| 125 | public void dispose() { | ||
| 126 | if (importedOntologyPath != null) { | ||
| 127 | File tmp = new File(importedOntologyPath); | ||
| 128 | if (tmp.exists()) tmp.delete(); | ||
| 129 | } | ||
| 130 | super.dispose(); | ||
| 131 | } | ||
| 132 | |||
| 133 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java b/src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java new file mode 100644 index 0000000..ba6a4d4 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java | |||
| @@ -0,0 +1,106 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | ||
| 2 | |||
| 3 | import java.io.File; | ||
| 4 | |||
| 5 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 6 | |||
| 7 | import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; | ||
| 8 | import uk.ac.ox.cs.pagoda.constraints.UpperUnaryBottom; | ||
| 9 | import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; | ||
| 10 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | ||
| 11 | import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; | ||
| 12 | import uk.ac.ox.cs.pagoda.rules.GeneralProgram; | ||
| 13 | import uk.ac.ox.cs.pagoda.tracking.QueryTracker; | ||
| 14 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 15 | |||
| 16 | public class IterativeRefinement { | ||
| 17 | |||
| 18 | private static final int depthLimit = 1; | ||
| 19 | |||
| 20 | QueryRecord m_record; | ||
| 21 | QueryTracker m_tracker; | ||
| 22 | BasicQueryEngine m_trackingStore; | ||
| 23 | QueryRecord[] botQueryRecords; | ||
| 24 | |||
| 25 | int m_depth = 0; | ||
| 26 | |||
| 27 | public IterativeRefinement(QueryRecord queryRecord, QueryTracker tracker, BasicQueryEngine trackingStore, QueryRecord[] botQueryRecords) { | ||
| 28 | m_record = queryRecord; | ||
| 29 | m_tracker = tracker; | ||
| 30 | m_trackingStore = trackingStore; | ||
| 31 | this.botQueryRecords = botQueryRecords; | ||
| 32 | } | ||
| 33 | |||
| 34 | String tempDataFile = "temp.ttl"; | ||
| 35 | |||
| 36 | public OWLOntology extractWithFullABox(String dataset, BottomStrategy upperBottom) { | ||
| 37 | GeneralProgram program; | ||
| 38 | boolean update; | ||
| 39 | while (m_depth < depthLimit) { | ||
| 40 | ++m_depth; | ||
| 41 | program = new GeneralProgram(m_record.getRelevantClauses(), m_record.getRelevantOntology()); | ||
| 42 | |||
| 43 | MultiStageQueryEngine tEngine = new MultiStageQueryEngine("query-tracking", true); | ||
| 44 | try { | ||
| 45 | tEngine.importRDFData("data", dataset); | ||
| 46 | if (tEngine.materialise4SpecificQuery(program, m_record, upperBottom) != 1) { | ||
| 47 | return m_record.getRelevantOntology(); | ||
| 48 | } | ||
| 49 | |||
| 50 | update = m_record.updateUpperBoundAnswers(tEngine.evaluate(m_record.getQueryText())); | ||
| 51 | } finally { | ||
| 52 | tEngine.dispose(); | ||
| 53 | } | ||
| 54 | |||
| 55 | if (m_record.processed()) | ||
| 56 | return null; | ||
| 57 | |||
| 58 | if (!update) break; | ||
| 59 | |||
| 60 | m_record.updateSubID(); | ||
| 61 | m_tracker.extract(m_trackingStore, botQueryRecords, true); | ||
| 62 | } | ||
| 63 | |||
| 64 | return m_record.getRelevantOntology(); | ||
| 65 | } | ||
| 66 | |||
| 67 | public OWLOntology extract(UpperUnaryBottom upperBottom) { | ||
| 68 | GeneralProgram program; | ||
| 69 | boolean update; | ||
| 70 | while (m_depth < depthLimit) { | ||
| 71 | m_record.saveABoxInTurtle(tempDataFile); | ||
| 72 | program = new GeneralProgram(m_record.getRelevantClauses(), m_record.getRelevantOntology()); | ||
| 73 | |||
| 74 | MultiStageQueryEngine tEngine = new MultiStageQueryEngine("query-tracking", true); | ||
| 75 | try { | ||
| 76 | tEngine.importRDFData("fragment abox", tempDataFile); | ||
| 77 | if (tEngine.materialise4SpecificQuery(program, m_record, upperBottom) != 1) { | ||
| 78 | return m_record.getRelevantOntology(); | ||
| 79 | } | ||
| 80 | |||
| 81 | update = m_record.updateUpperBoundAnswers(tEngine.evaluate(m_record.getQueryText())); | ||
| 82 | } finally { | ||
| 83 | tEngine.dispose(); | ||
| 84 | } | ||
| 85 | |||
| 86 | if (m_record.processed()) | ||
| 87 | return null; | ||
| 88 | |||
| 89 | if (!update) break; | ||
| 90 | |||
| 91 | m_record.updateSubID(); | ||
| 92 | m_tracker.extract(m_trackingStore, botQueryRecords, true); | ||
| 93 | } | ||
| 94 | |||
| 95 | return m_record.getRelevantOntology(); | ||
| 96 | } | ||
| 97 | |||
| 98 | public void dispose() { | ||
| 99 | File file = new File(tempDataFile); | ||
| 100 | if (file.exists()) { | ||
| 101 | file.delete(); | ||
| 102 | Utility.logDebug(file.getAbsolutePath() + " is deleted."); | ||
| 103 | } | ||
| 104 | } | ||
| 105 | |||
| 106 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java new file mode 100644 index 0000000..2c2feae --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java | |||
| @@ -0,0 +1,331 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | ||
| 2 | |||
| 3 | import java.util.Collection; | ||
| 4 | |||
| 5 | import org.semanticweb.karma2.profile.ELHOProfile; | ||
| 6 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 7 | |||
| 8 | import uk.ac.ox.cs.pagoda.multistage.*; | ||
| 9 | import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator; | ||
| 10 | import uk.ac.ox.cs.pagoda.owl.OWLHelper; | ||
| 11 | import uk.ac.ox.cs.pagoda.query.*; | ||
| 12 | import uk.ac.ox.cs.pagoda.query.QueryRecord.Step; | ||
| 13 | import uk.ac.ox.cs.pagoda.reasoner.full.Checker; | ||
| 14 | import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; | ||
| 15 | import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine; | ||
| 16 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; | ||
| 17 | import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter; | ||
| 18 | import uk.ac.ox.cs.pagoda.tracking.*; | ||
| 19 | import uk.ac.ox.cs.pagoda.util.Timer; | ||
| 20 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 21 | |||
| 22 | public class MyQueryReasoner extends QueryReasoner { | ||
| 23 | |||
| 24 | OWLOntology ontology; | ||
| 25 | |||
| 26 | // String additonalDataFile; | ||
| 27 | |||
| 28 | DatalogProgram program; | ||
| 29 | |||
| 30 | BasicQueryEngine rlLowerStore = null; | ||
| 31 | BasicQueryEngine lazyUpperStore = null; | ||
| 32 | // boolean[] namedIndividuals_lazyUpper; | ||
| 33 | |||
| 34 | OWLOntology elho_ontology; | ||
| 35 | KarmaQueryEngine elLowerStore = null; | ||
| 36 | |||
| 37 | BasicQueryEngine trackingStore = null; | ||
| 38 | // boolean[] namedIndividuals_tracking; | ||
| 39 | |||
| 40 | boolean equalityTag; | ||
| 41 | boolean multiStageTag; | ||
| 42 | |||
| 43 | public MyQueryReasoner() { | ||
| 44 | setup(true, true); | ||
| 45 | } | ||
| 46 | |||
| 47 | public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) { | ||
| 48 | setup(multiStageTag, considerEqualities); | ||
| 49 | } | ||
| 50 | |||
| 51 | private BasicQueryEngine getUpperStore(String name, boolean checkValidity) { | ||
| 52 | if (multiStageTag) | ||
| 53 | return new MultiStageQueryEngine(name, checkValidity); | ||
| 54 | // return new TwoStageQueryEngine(name, checkValidity); | ||
| 55 | else | ||
| 56 | return new BasicQueryEngine(name); | ||
| 57 | } | ||
| 58 | |||
| 59 | public void setup(boolean multiStageTag, boolean considerEqualities) { | ||
| 60 | satisfiable = null; | ||
| 61 | this.multiStageTag = multiStageTag; | ||
| 62 | this.equalityTag = considerEqualities; | ||
| 63 | |||
| 64 | rlLowerStore = new BasicQueryEngine("rl-lower-bound"); | ||
| 65 | elLowerStore = new KarmaQueryEngine("elho-lower-bound"); | ||
| 66 | |||
| 67 | trackingStore = getUpperStore("tracking", false); | ||
| 68 | } | ||
| 69 | |||
| 70 | protected void internal_importDataFile(String name, String datafile) { | ||
| 71 | // addDataFile(datafile); | ||
| 72 | rlLowerStore.importRDFData(name, datafile); | ||
| 73 | if (lazyUpperStore != null) | ||
| 74 | lazyUpperStore.importRDFData(name, datafile); | ||
| 75 | elLowerStore.importRDFData(name, datafile); | ||
| 76 | trackingStore.importRDFData(name, datafile); | ||
| 77 | } | ||
| 78 | |||
| 79 | @Override | ||
| 80 | public void loadOntology(OWLOntology o) { | ||
| 81 | if (!equalityTag) { | ||
| 82 | EqualitiesEliminator eliminator = new EqualitiesEliminator(o); | ||
| 83 | o = eliminator.getOutputOntology(); | ||
| 84 | eliminator.save(); | ||
| 85 | } | ||
| 86 | |||
| 87 | ontology = o; | ||
| 88 | program = new DatalogProgram(ontology, !forSemFacet); | ||
| 89 | // program.getLower().save(); | ||
| 90 | // program.getUpper().save(); | ||
| 91 | // program.getGeneral().save(); | ||
| 92 | |||
| 93 | if (multiStageTag && !program.getGeneral().isHorn()) { | ||
| 94 | lazyUpperStore = getUpperStore("lazy-upper-bound", true); // new MultiStageQueryEngine("lazy-upper-bound", true); // | ||
| 95 | } | ||
| 96 | |||
| 97 | importData(program.getAdditionalDataFile()); | ||
| 98 | |||
| 99 | elho_ontology = new ELHOProfile().getFragment(ontology); | ||
| 100 | elLowerStore.processOntology(elho_ontology); | ||
| 101 | } | ||
| 102 | |||
| 103 | private Collection<String> predicatesWithGap = null; | ||
| 104 | |||
| 105 | public Collection<String> getPredicatesWithGap() { | ||
| 106 | return predicatesWithGap; | ||
| 107 | } | ||
| 108 | |||
| 109 | @Override | ||
| 110 | public boolean preprocess() { | ||
| 111 | t.reset(); | ||
| 112 | Utility.logInfo("Preprocessing ... checking satisfiability ... "); | ||
| 113 | |||
| 114 | String name = "data", datafile = importedData.toString(); | ||
| 115 | rlLowerStore.importRDFData(name, datafile); | ||
| 116 | rlLowerStore.materialise("lower program", program.getLower().toString()); | ||
| 117 | // program.getLower().save(); | ||
| 118 | if (!consistency.checkRLLowerBound()) return false; | ||
| 119 | Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber()); | ||
| 120 | |||
| 121 | String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); | ||
| 122 | |||
| 123 | elLowerStore.importRDFData(name, datafile); | ||
| 124 | elLowerStore.materialise("saturate named individuals", originalMarkProgram); | ||
| 125 | elLowerStore.materialise("lower program", program.getLower().toString()); | ||
| 126 | elLowerStore.initialiseKarma(); | ||
| 127 | if (!consistency.checkELLowerBound()) return false; | ||
| 128 | |||
| 129 | if (lazyUpperStore != null) { | ||
| 130 | lazyUpperStore.importRDFData(name, datafile); | ||
| 131 | lazyUpperStore.materialise("saturate named individuals", originalMarkProgram); | ||
| 132 | int tag = lazyUpperStore.materialiseRestrictedly(program, null); | ||
| 133 | if (tag != 1) { | ||
| 134 | lazyUpperStore.dispose(); | ||
| 135 | lazyUpperStore = null; | ||
| 136 | } | ||
| 137 | if (tag == -1) return false; | ||
| 138 | } | ||
| 139 | if (consistency.checkLazyUpper()) { | ||
| 140 | satisfiable = true; | ||
| 141 | Utility.logInfo("time for satisfiability checking: " + t.duration()); | ||
| 142 | } | ||
| 143 | |||
| 144 | trackingStore.importRDFData(name, datafile); | ||
| 145 | trackingStore.materialise("saturate named individuals", originalMarkProgram); | ||
| 146 | |||
| 147 | // materialiseFullUpper(); | ||
| 148 | GapByStore4ID gap = new GapByStore4ID(trackingStore); | ||
| 149 | trackingStore.materialiseFoldedly(program, gap); | ||
| 150 | predicatesWithGap = gap.getPredicatesWithGap(); | ||
| 151 | gap.clear(); | ||
| 152 | |||
| 153 | if (program.getGeneral().isHorn()) | ||
| 154 | encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore); | ||
| 155 | else | ||
| 156 | encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore); | ||
| 157 | // encoder = new TrackingRuleEncoderDisj1(program.getUpper(), trackingStore); | ||
| 158 | // encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore); | ||
| 159 | // encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore); | ||
| 160 | |||
| 161 | program.deleteABoxTurtleFile(); | ||
| 162 | |||
| 163 | if (!isConsistent()) | ||
| 164 | return false; | ||
| 165 | |||
| 166 | consistency.extractBottomFragment(); | ||
| 167 | return true; | ||
| 168 | } | ||
| 169 | |||
| 170 | private Boolean satisfiable; | ||
| 171 | private ConsistencyManager consistency = new ConsistencyManager(this); | ||
| 172 | |||
| 173 | TrackingRuleEncoder encoder; | ||
| 174 | |||
| 175 | @Override | ||
| 176 | public boolean isConsistent() { | ||
| 177 | if (satisfiable == null) { | ||
| 178 | satisfiable = consistency.check(); | ||
| 179 | Utility.logInfo("time for satisfiability checking: " + t.duration()); | ||
| 180 | } | ||
| 181 | return satisfiable; | ||
| 182 | } | ||
| 183 | |||
| 184 | Timer t = new Timer(); | ||
| 185 | |||
| 186 | private OWLOntology relevantPart(QueryRecord queryRecord) { | ||
| 187 | AnswerTuples rlAnswer = null, elAnswer = null; | ||
| 188 | |||
| 189 | t.reset(); | ||
| 190 | try { | ||
| 191 | rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 192 | Utility.logDebug(t.duration()); | ||
| 193 | queryRecord.updateLowerBoundAnswers(rlAnswer); | ||
| 194 | } finally { | ||
| 195 | if (rlAnswer != null) rlAnswer.dispose(); | ||
| 196 | } | ||
| 197 | queryRecord.addProcessingTime(Step.LowerBound, t.duration()); | ||
| 198 | rlAnswer = null; | ||
| 199 | |||
| 200 | t.reset(); | ||
| 201 | BasicQueryEngine upperStore = queryRecord.isBottom() || lazyUpperStore == null ? trackingStore : lazyUpperStore; | ||
| 202 | |||
| 203 | String[] extendedQuery = queryRecord.getExtendedQueryText(); | ||
| 204 | |||
| 205 | queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 206 | if (!queryRecord.processed() && !queryRecord.getQueryText().equals(extendedQuery[0])) | ||
| 207 | queryUpperBound(upperStore, queryRecord, extendedQuery[0], queryRecord.getAnswerVariables()); | ||
| 208 | if (!queryRecord.processed() && queryRecord.hasNonAnsDistinguishedVariables()) | ||
| 209 | queryUpperBound(upperStore, queryRecord, extendedQuery[1], queryRecord.getDistinguishedVariables()); | ||
| 210 | |||
| 211 | queryRecord.addProcessingTime(Step.UpperBound, t.duration()); | ||
| 212 | if (queryRecord.processed()) { | ||
| 213 | queryRecord.setDifficulty(Step.UpperBound); | ||
| 214 | return null; | ||
| 215 | } | ||
| 216 | |||
| 217 | t.reset(); | ||
| 218 | try { | ||
| 219 | elAnswer = elLowerStore.evaluate(extendedQuery[0], queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers()); | ||
| 220 | Utility.logDebug(t.duration()); | ||
| 221 | queryRecord.updateLowerBoundAnswers(elAnswer); | ||
| 222 | } finally { | ||
| 223 | if (elAnswer != null) elAnswer.dispose(); | ||
| 224 | } | ||
| 225 | queryRecord.addProcessingTime(Step.ELLowerBound, t.duration()); | ||
| 226 | |||
| 227 | if (queryRecord.processed()) { | ||
| 228 | queryRecord.setDifficulty(Step.ELLowerBound); | ||
| 229 | return null; | ||
| 230 | } | ||
| 231 | |||
| 232 | t.reset(); | ||
| 233 | |||
| 234 | QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord); | ||
| 235 | |||
| 236 | OWLOntology knowledgebase; | ||
| 237 | t.reset(); | ||
| 238 | // if (program.getGeneral().isHorn()) { | ||
| 239 | // knowledgebase = tracker.extract(lazyUpperStore, consistency.getQueryRecords(), true); | ||
| 240 | // queryRecord.addProcessingTime(Step.Fragment, t.duration()); | ||
| 241 | // return knowledgebase; | ||
| 242 | // } | ||
| 243 | // else { | ||
| 244 | knowledgebase = tracker.extract(trackingStore, consistency.getQueryRecords(), true); | ||
| 245 | queryRecord.addProcessingTime(Step.Fragment, t.duration()); | ||
| 246 | // } | ||
| 247 | |||
| 248 | if (knowledgebase.isEmpty() || queryRecord.isBottom()) | ||
| 249 | return knowledgebase; | ||
| 250 | |||
| 251 | if (program.getGeneral().isHorn()) return knowledgebase; | ||
| 252 | |||
| 253 | // t.reset(); | ||
| 254 | // if (queryRecord.isHorn() && lazyUpperStore != null) { | ||
| 255 | //// knowledgebase = tracker.extract(lazyUpperStore, consistency.getQueryRecords(), true); | ||
| 256 | // } else if (queryRecord.getArity() < 3) { | ||
| 257 | // IterativeRefinement iterativeRefinement = new IterativeRefinement(queryRecord, tracker, trackingStore, consistency.getQueryRecords()); | ||
| 258 | // knowledgebase = iterativeRefinement.extractWithFullABox(importedData.toString(), program.getUpperBottomStrategy()); | ||
| 259 | // } | ||
| 260 | // | ||
| 261 | // queryRecord.addProcessingTime(Step.FragmentRefinement, t.duration()); | ||
| 262 | // | ||
| 263 | // if (knowledgebase == null) | ||
| 264 | // queryRecord.setDifficulty(Step.FragmentRefinement); | ||
| 265 | |||
| 266 | return knowledgebase; | ||
| 267 | } | ||
| 268 | |||
| 269 | // int counter = 0; | ||
| 270 | |||
| 271 | private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) { | ||
| 272 | AnswerTuples rlAnswer = null; | ||
| 273 | try { | ||
| 274 | Utility.logDebug(queryText); | ||
| 275 | rlAnswer = upperStore.evaluate(queryText, answerVariables); | ||
| 276 | Utility.logDebug(t.duration()); | ||
| 277 | queryRecord.updateUpperBoundAnswers(rlAnswer); | ||
| 278 | rlAnswer.dispose(); | ||
| 279 | } finally { | ||
| 280 | if (rlAnswer != null) rlAnswer.dispose(); | ||
| 281 | } | ||
| 282 | rlAnswer = null; | ||
| 283 | } | ||
| 284 | |||
| 285 | @Override | ||
| 286 | public void evaluate(QueryRecord queryRecord) { | ||
| 287 | OWLOntology knowledgebase = relevantPart(queryRecord); | ||
| 288 | |||
| 289 | if (knowledgebase == null) { | ||
| 290 | Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty()); | ||
| 291 | return ; | ||
| 292 | } | ||
| 293 | |||
| 294 | int aboxcount = knowledgebase.getABoxAxioms(true).size(); | ||
| 295 | Utility.logDebug("ABox axioms: " + aboxcount + " TBox axioms: " + (knowledgebase.getAxiomCount() - aboxcount)); | ||
| 296 | // queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); | ||
| 297 | |||
| 298 | Timer t = new Timer(); | ||
| 299 | Checker summarisedChecker = new HermitSummaryFilter(queryRecord); | ||
| 300 | int validNumber = summarisedChecker.check(queryRecord.getGapAnswers()); | ||
| 301 | summarisedChecker.dispose(); | ||
| 302 | Utility.logDebug("Total time for full reasoner: " + t.duration()); | ||
| 303 | if (!forSemFacet || validNumber == 0) { | ||
| 304 | queryRecord.markAsProcessed(); | ||
| 305 | Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty()); | ||
| 306 | } | ||
| 307 | } | ||
| 308 | |||
| 309 | @Override | ||
| 310 | public void evaluateUpper(QueryRecord queryRecord) { | ||
| 311 | AnswerTuples rlAnswer = null; | ||
| 312 | boolean useFull = queryRecord.isBottom() || lazyUpperStore == null; | ||
| 313 | try { | ||
| 314 | rlAnswer = (useFull ? trackingStore: lazyUpperStore).evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 315 | queryRecord.updateUpperBoundAnswers(rlAnswer, true); | ||
| 316 | } finally { | ||
| 317 | if (rlAnswer != null) rlAnswer.dispose(); | ||
| 318 | } | ||
| 319 | } | ||
| 320 | |||
| 321 | @Override | ||
| 322 | public void dispose() { | ||
| 323 | if (encoder != null) encoder.dispose(); | ||
| 324 | if (rlLowerStore != null) rlLowerStore.dispose(); | ||
| 325 | if (lazyUpperStore != null) lazyUpperStore.dispose(); | ||
| 326 | if (elLowerStore != null) elLowerStore.dispose(); | ||
| 327 | if (trackingStore != null) trackingStore.dispose(); | ||
| 328 | super.dispose(); | ||
| 329 | } | ||
| 330 | |||
| 331 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java new file mode 100644 index 0000000..b9abf07 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java | |||
| @@ -0,0 +1,17 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | ||
| 2 | |||
| 3 | import java.util.Collection; | ||
| 4 | |||
| 5 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 6 | |||
| 7 | public interface QueryEngine { | ||
| 8 | |||
| 9 | public void evaluate(Collection<String> queryTexts, String answerFile); | ||
| 10 | |||
| 11 | public AnswerTuples evaluate(String queryText); | ||
| 12 | |||
| 13 | public AnswerTuples evaluate(String queryText, String[] answerVariables); | ||
| 14 | |||
| 15 | public void dispose(); | ||
| 16 | |||
| 17 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java new file mode 100644 index 0000000..0c009a2 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java | |||
| @@ -0,0 +1,221 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | ||
| 2 | |||
| 3 | import java.io.BufferedWriter; | ||
| 4 | import java.io.File; | ||
| 5 | import java.io.FileNotFoundException; | ||
| 6 | import java.io.FileOutputStream; | ||
| 7 | import java.io.IOException; | ||
| 8 | import java.io.OutputStreamWriter; | ||
| 9 | import java.util.Collection; | ||
| 10 | |||
| 11 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 12 | |||
| 13 | import uk.ac.ox.cs.pagoda.owl.OWLHelper; | ||
| 14 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 15 | import uk.ac.ox.cs.pagoda.query.QueryManager; | ||
| 16 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | ||
| 17 | import uk.ac.ox.cs.pagoda.util.Timer; | ||
| 18 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 19 | |||
| 20 | public abstract class QueryReasoner { | ||
| 21 | |||
| 22 | protected boolean forSemFacet = false; | ||
| 23 | |||
| 24 | public static enum Type { Full, RLU, ELHOU }; | ||
| 25 | |||
| 26 | public static QueryReasoner getInstanceForSemFacet(OWLOntology o) { | ||
| 27 | QueryReasoner reasoner = getInstance(Type.Full, o, true, true); | ||
| 28 | reasoner.forSemFacet = true; | ||
| 29 | return reasoner; | ||
| 30 | } | ||
| 31 | |||
| 32 | |||
| 33 | public static QueryReasoner getInstance(OWLOntology o) { | ||
| 34 | return getInstance(Type.Full, o, true, true); | ||
| 35 | } | ||
| 36 | |||
| 37 | public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) { | ||
| 38 | Utility.initialise(); | ||
| 39 | QueryReasoner reasoner; | ||
| 40 | if (OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner(); | ||
| 41 | else if (OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner(); | ||
| 42 | else | ||
| 43 | switch (type) { | ||
| 44 | case RLU: | ||
| 45 | reasoner = new RLUQueryReasoner(performMultiStages, considerEqualities); break; | ||
| 46 | case ELHOU: | ||
| 47 | reasoner = new ELHOUQueryReasoner(performMultiStages, considerEqualities); break; | ||
| 48 | default: | ||
| 49 | reasoner = new MyQueryReasoner(performMultiStages, considerEqualities); | ||
| 50 | } | ||
| 51 | return reasoner; | ||
| 52 | } | ||
| 53 | |||
| 54 | public static final String ImportDataFileSeparator = ";"; | ||
| 55 | protected StringBuilder importedData = new StringBuilder(); | ||
| 56 | |||
| 57 | public void importData(String datafile) { | ||
| 58 | if (datafile != null && !datafile.equalsIgnoreCase("null")) | ||
| 59 | importData(datafile.split(ImportDataFileSeparator)); | ||
| 60 | } | ||
| 61 | |||
| 62 | public void importData(String[] datafiles) { | ||
| 63 | if (datafiles != null) { | ||
| 64 | for (String datafile: datafiles) { | ||
| 65 | File file = new File(datafile); | ||
| 66 | if (file.exists()) { | ||
| 67 | if (file.isFile()) importDataFile(file); | ||
| 68 | else importDataDirectory(file); | ||
| 69 | } | ||
| 70 | else { | ||
| 71 | Utility.logError("warning: file " + datafile + " doesn't exists."); | ||
| 72 | } | ||
| 73 | } | ||
| 74 | } | ||
| 75 | } | ||
| 76 | |||
| 77 | private void importDataDirectory(File file) { | ||
| 78 | for (File child: file.listFiles()) | ||
| 79 | if (child.isFile()) importDataFile(child); | ||
| 80 | else importDataDirectory(child); | ||
| 81 | } | ||
| 82 | |||
| 83 | private void importDataFile(File file) { | ||
| 84 | String datafile; | ||
| 85 | try { | ||
| 86 | datafile = file.getCanonicalPath(); | ||
| 87 | } catch (IOException e) { | ||
| 88 | e.printStackTrace(); | ||
| 89 | return ; | ||
| 90 | } | ||
| 91 | importDataFile(datafile); | ||
| 92 | } | ||
| 93 | |||
| 94 | protected final void importDataFile(String datafile) { | ||
| 95 | if (importedData.length() == 0) | ||
| 96 | importedData.append(datafile); | ||
| 97 | else | ||
| 98 | importedData.append(ImportDataFileSeparator).append(datafile); | ||
| 99 | |||
| 100 | } | ||
| 101 | |||
| 102 | public abstract void loadOntology(OWLOntology ontology); | ||
| 103 | |||
| 104 | public abstract boolean preprocess(); | ||
| 105 | |||
| 106 | public abstract boolean isConsistent(); | ||
| 107 | |||
| 108 | public boolean fullReasoner = this instanceof MyQueryReasoner; | ||
| 109 | |||
| 110 | public abstract void evaluate(QueryRecord record); | ||
| 111 | |||
| 112 | public abstract void evaluateUpper(QueryRecord record); | ||
| 113 | |||
| 114 | public AnswerTuples evaluate(String queryText, boolean forFacetGeneration) { | ||
| 115 | if (forFacetGeneration) { | ||
| 116 | QueryRecord record = m_queryManager.create(queryText); | ||
| 117 | Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText); | ||
| 118 | if (!record.processed()) | ||
| 119 | evaluateUpper(record); | ||
| 120 | // AnswerTuples tuples = record.getUpperBoundAnswers(); | ||
| 121 | // for (AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) { | ||
| 122 | // tuple = tuples.getTuple(); | ||
| 123 | // if (tuple.toString().contains("NC")) | ||
| 124 | // System.out.println(tuple.toString()); | ||
| 125 | // } | ||
| 126 | return record.getUpperBoundAnswers(); | ||
| 127 | } | ||
| 128 | else | ||
| 129 | return evaluate(queryText); | ||
| 130 | } | ||
| 131 | |||
| 132 | public AnswerTuples evaluate(String queryText) { | ||
| 133 | QueryRecord record = m_queryManager.create(queryText); | ||
| 134 | Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); | ||
| 135 | if (!record.processed()) | ||
| 136 | evaluate(record); | ||
| 137 | AnswerTuples answer = record.getAnswers(); | ||
| 138 | record.dispose(); | ||
| 139 | return answer; | ||
| 140 | |||
| 141 | } | ||
| 142 | |||
| 143 | public void evaluate_shell(String queryText) { | ||
| 144 | QueryRecord record = m_queryManager.create(queryText); | ||
| 145 | Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); | ||
| 146 | if (!record.processed()) | ||
| 147 | evaluate(record); | ||
| 148 | Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple()); | ||
| 149 | record.dispose(); | ||
| 150 | |||
| 151 | } | ||
| 152 | |||
| 153 | public void evaluate(Collection<QueryRecord> queryRecords) { | ||
| 154 | evaluate(queryRecords, null); | ||
| 155 | } | ||
| 156 | |||
| 157 | BufferedWriter answerWriter = null; | ||
| 158 | |||
| 159 | public void evaluate(Collection<QueryRecord> queryRecords, String answerFile) { | ||
| 160 | if (!isConsistent()) { | ||
| 161 | Utility.logDebug("The ontology and dataset is inconsistent."); | ||
| 162 | return ; | ||
| 163 | } | ||
| 164 | |||
| 165 | if (answerWriter == null && answerFile != null) { | ||
| 166 | try { | ||
| 167 | answerWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(answerFile))); | ||
| 168 | } catch (FileNotFoundException e) { | ||
| 169 | Utility.logInfo("The answer file not found! " + answerFile); | ||
| 170 | return ; | ||
| 171 | } | ||
| 172 | } | ||
| 173 | |||
| 174 | Timer t = new Timer(); | ||
| 175 | for (QueryRecord record: queryRecords) { | ||
| 176 | // if (Integer.parseInt(record.getQueryID()) != 218) continue; | ||
| 177 | Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", | ||
| 178 | record.getQueryText()); | ||
| 179 | if (!record.processed()) { | ||
| 180 | t.reset(); | ||
| 181 | if (!record.processed()) | ||
| 182 | evaluate(record); | ||
| 183 | Utility.logInfo("Total time to answer this query: " + t.duration()); | ||
| 184 | if (!fullReasoner && !record.processed()) { | ||
| 185 | Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds."); | ||
| 186 | continue; | ||
| 187 | } | ||
| 188 | } | ||
| 189 | // FIXME: change the argument below | ||
| 190 | try { | ||
| 191 | record.outputAnswers(answerWriter); | ||
| 192 | } catch (IOException e) { | ||
| 193 | Utility.logInfo("Error in outputing answers " + answerFile); | ||
| 194 | } | ||
| 195 | record.outputTimes(); | ||
| 196 | record.dispose(); | ||
| 197 | } | ||
| 198 | } | ||
| 199 | |||
| 200 | public void dispose() { | ||
| 201 | if (answerWriter != null) | ||
| 202 | try { | ||
| 203 | answerWriter.close(); | ||
| 204 | } catch (IOException e) { | ||
| 205 | e.printStackTrace(); | ||
| 206 | } | ||
| 207 | Utility.cleanup(); | ||
| 208 | } | ||
| 209 | |||
| 210 | private QueryManager m_queryManager = new QueryManager(); | ||
| 211 | |||
| 212 | public QueryManager getQueryManager() { | ||
| 213 | return m_queryManager; | ||
| 214 | } | ||
| 215 | |||
| 216 | |||
| 217 | public static QueryReasoner getHermiTReasoner(boolean toCheckSatisfiability) { | ||
| 218 | return new HermiTReasoner(toCheckSatisfiability); | ||
| 219 | } | ||
| 220 | |||
| 221 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java new file mode 100644 index 0000000..3894874 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java | |||
| @@ -0,0 +1,83 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | ||
| 2 | |||
| 3 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 4 | import uk.ac.ox.cs.pagoda.constraints.UnaryBottom; | ||
| 5 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 6 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | ||
| 7 | import uk.ac.ox.cs.pagoda.query.QueryRecord.Step; | ||
| 8 | import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; | ||
| 9 | import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine; | ||
| 10 | import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; | ||
| 11 | import uk.ac.ox.cs.pagoda.util.Timer; | ||
| 12 | |||
| 13 | public class RLQueryReasoner extends QueryReasoner { | ||
| 14 | |||
| 15 | RDFoxQueryEngine rlLowerStore = null; | ||
| 16 | |||
| 17 | LowerDatalogProgram program; | ||
| 18 | |||
| 19 | public RLQueryReasoner() { | ||
| 20 | rlLowerStore = new BasicQueryEngine("rl"); | ||
| 21 | } | ||
| 22 | |||
| 23 | Timer t = new Timer(); | ||
| 24 | |||
| 25 | @Override | ||
| 26 | public void evaluate(QueryRecord queryRecord) { | ||
| 27 | AnswerTuples rlAnswer = null; | ||
| 28 | t.reset(); | ||
| 29 | try { | ||
| 30 | rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 31 | queryRecord.updateLowerBoundAnswers(rlAnswer); | ||
| 32 | } finally { | ||
| 33 | if (rlAnswer != null) rlAnswer.dispose(); | ||
| 34 | } | ||
| 35 | queryRecord.addProcessingTime(Step.LowerBound, t.duration()); | ||
| 36 | queryRecord.setDifficulty(Step.LowerBound); | ||
| 37 | queryRecord.markAsProcessed(); | ||
| 38 | } | ||
| 39 | |||
| 40 | @Override | ||
| 41 | public void dispose() { | ||
| 42 | if (rlLowerStore != null) rlLowerStore.dispose(); | ||
| 43 | super.dispose(); | ||
| 44 | } | ||
| 45 | |||
| 46 | @Override | ||
| 47 | public void loadOntology(OWLOntology ontology) { | ||
| 48 | program = new LowerDatalogProgram(); | ||
| 49 | program.load(ontology, new UnaryBottom()); | ||
| 50 | program.transform(); | ||
| 51 | |||
| 52 | importData(program.getAdditionalDataFile()); | ||
| 53 | } | ||
| 54 | |||
| 55 | @Override | ||
| 56 | public boolean preprocess() { | ||
| 57 | rlLowerStore.importRDFData("data", importedData.toString()); | ||
| 58 | rlLowerStore.materialise("lower program", program.toString()); | ||
| 59 | |||
| 60 | if (!isConsistent()) | ||
| 61 | return false; | ||
| 62 | return true; | ||
| 63 | } | ||
| 64 | |||
| 65 | @Override | ||
| 66 | public boolean isConsistent() { | ||
| 67 | AnswerTuples ans = null; | ||
| 68 | try { | ||
| 69 | ans = rlLowerStore.evaluate(QueryRecord.botQueryText, new String[] {"X"}); | ||
| 70 | return !ans.isValid(); | ||
| 71 | } finally { | ||
| 72 | if (ans != null) ans.dispose(); | ||
| 73 | |||
| 74 | } | ||
| 75 | |||
| 76 | } | ||
| 77 | |||
| 78 | @Override | ||
| 79 | public void evaluateUpper(QueryRecord record) { | ||
| 80 | evaluate(record); | ||
| 81 | } | ||
| 82 | |||
| 83 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java new file mode 100644 index 0000000..fe4022d --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java | |||
| @@ -0,0 +1,134 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner; | ||
| 2 | |||
| 3 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 4 | |||
| 5 | import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; | ||
| 6 | import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator; | ||
| 7 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 8 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | ||
| 9 | import uk.ac.ox.cs.pagoda.query.QueryRecord.Step; | ||
| 10 | import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; | ||
| 11 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; | ||
| 12 | import uk.ac.ox.cs.pagoda.util.Timer; | ||
| 13 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 14 | |||
| 15 | public class RLUQueryReasoner extends QueryReasoner { | ||
| 16 | |||
| 17 | DatalogProgram program; | ||
| 18 | |||
| 19 | BasicQueryEngine rlLowerStore, rlUpperStore; | ||
| 20 | |||
| 21 | boolean multiStageTag, equalityTag; | ||
| 22 | |||
| 23 | public RLUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { | ||
| 24 | this.multiStageTag = multiStageTag; | ||
| 25 | this.equalityTag = considerEqualities; | ||
| 26 | rlLowerStore = new BasicQueryEngine("rl-lower-bound"); | ||
| 27 | if (!multiStageTag) | ||
| 28 | rlUpperStore = new BasicQueryEngine("rl-upper-bound"); | ||
| 29 | else | ||
| 30 | rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); | ||
| 31 | } | ||
| 32 | |||
| 33 | Timer t = new Timer(); | ||
| 34 | |||
| 35 | @Override | ||
| 36 | public void evaluate(QueryRecord queryRecord) { | ||
| 37 | AnswerTuples ans = null; | ||
| 38 | t.reset(); | ||
| 39 | try { | ||
| 40 | ans = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 41 | Utility.logDebug(t.duration()); | ||
| 42 | queryRecord.updateLowerBoundAnswers(ans); | ||
| 43 | } finally { | ||
| 44 | if (ans != null) ans.dispose(); | ||
| 45 | } | ||
| 46 | queryRecord.addProcessingTime(Step.LowerBound, t.duration()); | ||
| 47 | |||
| 48 | ans = null; | ||
| 49 | t.reset(); | ||
| 50 | try { | ||
| 51 | ans = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 52 | Utility.logDebug(t.duration()); | ||
| 53 | queryRecord.updateUpperBoundAnswers(ans); | ||
| 54 | } finally { | ||
| 55 | if (ans != null) ans.dispose(); | ||
| 56 | } | ||
| 57 | queryRecord.addProcessingTime(Step.UpperBound, t.duration()); | ||
| 58 | |||
| 59 | if (queryRecord.processed()) | ||
| 60 | queryRecord.setDifficulty(Step.UpperBound); | ||
| 61 | } | ||
| 62 | |||
| 63 | @Override | ||
| 64 | public void evaluateUpper(QueryRecord queryRecord) { | ||
| 65 | AnswerTuples ans = null; | ||
| 66 | try { | ||
| 67 | ans = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 68 | Utility.logDebug(t.duration()); | ||
| 69 | queryRecord.updateUpperBoundAnswers(ans, true); | ||
| 70 | } finally { | ||
| 71 | if (ans != null) ans.dispose(); | ||
| 72 | } | ||
| 73 | } | ||
| 74 | |||
| 75 | @Override | ||
| 76 | public void dispose() { | ||
| 77 | if (rlLowerStore != null) rlLowerStore.dispose(); | ||
| 78 | if (rlUpperStore != null) rlUpperStore.dispose(); | ||
| 79 | super.dispose(); | ||
| 80 | } | ||
| 81 | |||
| 82 | @Override | ||
| 83 | public void loadOntology(OWLOntology o) { | ||
| 84 | if (!equalityTag) { | ||
| 85 | EqualitiesEliminator eliminator = new EqualitiesEliminator(o); | ||
| 86 | o = eliminator.getOutputOntology(); | ||
| 87 | eliminator.save(); | ||
| 88 | } | ||
| 89 | |||
| 90 | OWLOntology ontology = o; | ||
| 91 | program = new DatalogProgram(ontology); | ||
| 92 | importData(program.getAdditionalDataFile()); | ||
| 93 | } | ||
| 94 | |||
| 95 | @Override | ||
| 96 | public boolean preprocess() { | ||
| 97 | String datafile = importedData.toString(); | ||
| 98 | rlLowerStore.importRDFData("data", datafile); | ||
| 99 | rlLowerStore.materialise("lower program", program.getLower().toString()); | ||
| 100 | |||
| 101 | rlUpperStore.importRDFData("data", datafile); | ||
| 102 | rlUpperStore.materialiseRestrictedly(program, null); | ||
| 103 | |||
| 104 | if (!isConsistent()) | ||
| 105 | return false; | ||
| 106 | |||
| 107 | return true; | ||
| 108 | } | ||
| 109 | |||
| 110 | @Override | ||
| 111 | public boolean isConsistent() { | ||
| 112 | String[] X = new String[] { "X" }; | ||
| 113 | AnswerTuples ans = null; | ||
| 114 | try { | ||
| 115 | ans = rlLowerStore.evaluate(QueryRecord.botQueryText, X); | ||
| 116 | if (ans.isValid()) return false; | ||
| 117 | } finally { | ||
| 118 | if (ans != null) ans.dispose(); | ||
| 119 | } | ||
| 120 | |||
| 121 | ans = null; | ||
| 122 | try { | ||
| 123 | ans = rlUpperStore.evaluate(QueryRecord.botQueryText, X); | ||
| 124 | if (!ans.isValid()) return true; | ||
| 125 | } finally { | ||
| 126 | if (ans != null) ans.dispose(); | ||
| 127 | } | ||
| 128 | |||
| 129 | Utility.logDebug("The consistency of the data has not been determined yet."); | ||
| 130 | return true; | ||
| 131 | } | ||
| 132 | |||
| 133 | |||
| 134 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java b/src/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java new file mode 100644 index 0000000..849b971 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java | |||
| @@ -0,0 +1,15 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.full; | ||
| 2 | |||
| 3 | import uk.ac.ox.cs.pagoda.query.AnswerTuple; | ||
| 4 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 5 | |||
/**
 * Verifies candidate answer tuples against the full ontology semantics
 * (e.g. by delegating to a complete OWL reasoner such as HermiT).
 */
public interface Checker {

	/**
	 * Checks every tuple in the given collection and disposes it.
	 *
	 * @return the number of tuples confirmed as genuine answers
	 */
	public int check(AnswerTuples answers);

	/** Returns true iff the given single tuple is a genuine answer. */
	public boolean check(AnswerTuple answer);

	/** Returns true iff the underlying knowledge base is consistent. */
	public boolean isConsistent();

	/** Releases any resources (e.g. the underlying reasoner) held by this checker. */
	public void dispose();
}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/full/HermitChecker.java b/src/uk/ac/ox/cs/pagoda/reasoner/full/HermitChecker.java new file mode 100644 index 0000000..6f5d363 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/full/HermitChecker.java | |||
| @@ -0,0 +1,237 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.full; | ||
| 2 | |||
| 3 | import java.util.HashMap; | ||
| 4 | import java.util.HashSet; | ||
| 5 | import java.util.Map; | ||
| 6 | import java.util.Set; | ||
| 7 | |||
| 8 | import org.semanticweb.HermiT.Reasoner; | ||
| 9 | import org.semanticweb.HermiT.model.DLClause; | ||
| 10 | import org.semanticweb.HermiT.model.Term; | ||
| 11 | import org.semanticweb.HermiT.model.Variable; | ||
| 12 | import org.semanticweb.owlapi.model.IRI; | ||
| 13 | import org.semanticweb.owlapi.model.OWLAxiom; | ||
| 14 | import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; | ||
| 15 | import org.semanticweb.owlapi.model.OWLDataFactory; | ||
| 16 | import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; | ||
| 17 | import org.semanticweb.owlapi.model.OWLIndividual; | ||
| 18 | import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; | ||
| 19 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 20 | import org.semanticweb.owlapi.model.OWLOntologyManager; | ||
| 21 | |||
| 22 | import uk.ac.ox.cs.pagoda.endomorph.Clique; | ||
| 23 | import uk.ac.ox.cs.pagoda.endomorph.DependencyGraph; | ||
| 24 | import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; | ||
| 25 | import uk.ac.ox.cs.pagoda.query.AnswerTuple; | ||
| 26 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 27 | import uk.ac.ox.cs.pagoda.query.QueryRecord; | ||
| 28 | import uk.ac.ox.cs.pagoda.query.rollup.QueryGraph; | ||
| 29 | import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; | ||
| 30 | import uk.ac.ox.cs.pagoda.util.Namespace; | ||
| 31 | import uk.ac.ox.cs.pagoda.util.Timer; | ||
| 32 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 33 | |||
/**
 * A {@link Checker} backed by the HermiT reasoner: a candidate answer tuple
 * is accepted iff the ABox assertions obtained by instantiating the query
 * graph with that tuple are entailed by the ontology.
 *
 * When a dependency graph over candidate answers is supplied, two synthetic
 * individuals ("top" and "bot") are added to short-circuit checking: if even
 * the strongest candidate fails, all fail; if even the weakest succeeds, all
 * succeed (see {@code tag} below).
 */
public class HermitChecker implements Checker {

	protected OWLDataFactory factory;

	private String queryText;
	private DLClause queryClause;

	private Reasoner hermit;
	// answerVariable[1] holds the variables used for entailment checking
	// (index 0 presumably holds the distinguished answer variables — TODO confirm).
	protected String[][] answerVariable;
	protected OWLOntology ontology;
	protected QueryRecord record;
	protected QueryGraph qGraph = null;

	/**
	 * Copy-style constructor: shares the query and ontology of another
	 * HermitChecker but builds a fresh HermiT instance. If the argument is
	 * not a HermitChecker, all fields stay null and the Reasoner is created
	 * over a null ontology.
	 */
	public HermitChecker(Checker checker) {
		if (checker instanceof HermitChecker) {
			HermitChecker other = (HermitChecker) checker;
			factory = other.factory;
			queryText = other.queryText;
			queryClause = other.queryClause;
			answerVariable = other.answerVariable;
			ontology = other.ontology;
//			record = other.record;
		}

		hermit = new Reasoner(ontology);
	}

	/** Builds a checker for the query described by the given record. */
	public HermitChecker(OWLOntology ontology, QueryRecord record) {
		this.ontology = ontology;
		queryText = record.getQueryText();
		answerVariable = record.getVariables();
		queryClause = record.getClause();
//		this.record = record;
	}

	/** Builds a checker directly from query text (may be null). */
	public HermitChecker(OWLOntology ontology, String queryText) {
		this.ontology = ontology;
		this.queryText = queryText;
		answerVariable = queryText == null ? null : ConjunctiveQueryHelper.getAnswerVariables(queryText);
		queryClause = DLClauseHelper.getQuery(queryText, null);
//		this.record = null;
	}

	// tag == 0: each tuple must be checked individually;
	// tag == -1: the "top" representative failed, so every tuple fails;
	// tag == 1: the "bot" representative succeeded, so every tuple succeeds.
	private int tag = 0;
	AnswerTuple topAnswerTuple = null, botAnswerTuple = null;

	/**
	 * Lazily builds the query graph and the HermiT instance. If a dependency
	 * graph is available for a single-variable query, the top/bot shortcut
	 * individuals are added first; if that makes the ontology inconsistent,
	 * the extra axioms are rolled back and a plain reasoner is used.
	 */
	private void initialiseReasoner() {
		qGraph = new QueryGraph(queryClause.getBodyAtoms(), answerVariable[1], ontology);
		OWLOntologyManager manager = ontology.getOWLOntologyManager();
		factory = manager.getOWLDataFactory();

		if (hermit != null) hermit.dispose();

		if (dGraph != null && answerVariable[1].length == 1 && (dGraph.getExits().size() > 1 || dGraph.getEntrances().size() > 1)) {
			Set<OWLAxiom> axioms = new HashSet<OWLAxiom>();
			addTopAndBotTuple(axioms);
			manager.addAxioms(ontology, axioms);
			hermit = new Reasoner(ontology);
			if (!hermit.isConsistent()) {
				hermit.dispose();
				manager.removeAxioms(ontology, axioms);
				hermit = new Reasoner(ontology);
			} else {
				// top fails => nothing can succeed; bot succeeds => everything does.
				if (topAnswerTuple != null && !check(topAnswerTuple)) tag = -1;
				else if (botAnswerTuple != null && check(botAnswerTuple)) tag = 1;
			}
		}
		else
			hermit = new Reasoner(ontology);
	}

	/**
	 * Creates the assertions for the synthetic "top" and "bot" individuals.
	 * "top" receives the union of the assertions of all exit-clique
	 * representatives; "bot" receives only the assertions shared by ALL
	 * entrance-clique representatives (counted via {@code counter}).
	 * A tuple is skipped (set to null) when its side has at most one clique.
	 */
	private void addTopAndBotTuple(Set<OWLAxiom> axioms) {
		String top_str = Namespace.PAGODA_ANONY + "top", bot_str = Namespace.PAGODA_ANONY + "bot";
		topAnswerTuple = new AnswerTuple(new uk.ac.ox.cs.JRDFox.model.Individual[] { uk.ac.ox.cs.JRDFox.model.Individual.create(top_str) } );
		botAnswerTuple = new AnswerTuple(new uk.ac.ox.cs.JRDFox.model.Individual[] { uk.ac.ox.cs.JRDFox.model.Individual.create(bot_str) } );
		OWLIndividual top_ind = factory.getOWLNamedIndividual(IRI.create(top_str)), bot_ind = factory.getOWLNamedIndividual(IRI.create(bot_str));
		Map<OWLAxiom, Integer> counter = new HashMap<OWLAxiom, Integer>();

		Set<String> topAnswers = new HashSet<String>(), botAnswers = new HashSet<String>();
		OWLIndividual sub, obj;
		if (dGraph.getExits().size() > 1) {
			for (Clique answerClique: dGraph.getExits())
				topAnswers.add(((uk.ac.ox.cs.JRDFox.model.Individual) answerClique.getRepresentative().getAnswerTuple().getGroundTerm(0)).getIRI());
		}
		else topAnswerTuple = null;

		if (dGraph.getEntrances().size() > 1) {
			for (Clique answerClique: dGraph.getEntrances())
				botAnswers.add(((uk.ac.ox.cs.JRDFox.model.Individual) answerClique.getRepresentative().getAnswerTuple().getGroundTerm(0)).getIRI());
		}
		else botAnswerTuple = null;

		// Copy each ABox assertion about a top/bot member onto the synthetic individual.
		for (OWLAxiom axiom: ontology.getABoxAxioms(true))
			if (axiom instanceof OWLClassAssertionAxiom) {
				OWLClassAssertionAxiom ca = (OWLClassAssertionAxiom) axiom;
				sub = ca.getIndividual();
				if (topAnswers.contains(sub.toStringID()))
					axioms.add(factory.getOWLClassAssertionAxiom(ca.getClassExpression(), top_ind));
				if (botAnswers.contains(sub.toStringID()))
					inc(counter, factory.getOWLClassAssertionAxiom(ca.getClassExpression(), bot_ind));
			}
			else if (axiom instanceof OWLObjectPropertyAssertionAxiom) {
				OWLObjectPropertyAssertionAxiom oa = (OWLObjectPropertyAssertionAxiom) axiom;
				sub = oa.getSubject(); obj = oa.getObject();
				if (topAnswers.contains(sub.toStringID()))
					if (topAnswers.contains(obj.toStringID()))
						axioms.add(factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), top_ind, top_ind));
					else
						axioms.add(factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), top_ind, obj));
				else {
					if (topAnswers.contains(obj.toStringID()))
						axioms.add(factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), sub, top_ind));
				}

				if (botAnswers.contains(sub.toStringID()))
					if (botAnswers.contains(obj.toStringID()))
						inc(counter, factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), bot_ind, bot_ind));
					else
						inc(counter, factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), bot_ind, obj));
				else {
					if (botAnswers.contains(obj.toStringID()))
						inc(counter, factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), sub, bot_ind));
				}

			}
			else if (axiom instanceof OWLDataPropertyAssertionAxiom) {
				OWLDataPropertyAssertionAxiom da = (OWLDataPropertyAssertionAxiom) axiom;
				sub = da.getSubject();
				if (topAnswers.contains(sub.toStringID()))
					axioms.add(factory.getOWLDataPropertyAssertionAxiom(da.getProperty(), top_ind, da.getObject()));

				if (botAnswers.contains(sub.toStringID()))
					inc(counter, factory.getOWLDataPropertyAssertionAxiom(da.getProperty(), bot_ind, da.getObject()));
			}

		// Keep for "bot" only the axioms asserted for every entrance representative.
		int number = botAnswers.size();
		for (Map.Entry<OWLAxiom, Integer> entry: counter.entrySet()) {
			if (entry.getValue() == number)
				axioms.add(entry.getKey());
		}
	}

	// Increments the occurrence count of an axiom in the counter map.
	private void inc(Map<OWLAxiom, Integer> counter, OWLAxiom newAxiom) {
		Integer number = counter.get(newAxiom);
		if (number == null) counter.put(newAxiom, 1);
		else counter.put(newAxiom, number + 1);
	}

	/**
	 * Checks all tuples, disposing the collection afterwards.
	 *
	 * @return the number of tuples confirmed by HermiT
	 */
	@Override
	public int check(AnswerTuples answers) {
		if (hermit == null) initialiseReasoner();
		int answerCounter = 0, counter = 0;
		for (; answers.isValid(); answers.moveNext()) {
			++counter;
			if (check(answers.getTuple())) ++answerCounter;
		}
		answers.dispose();

		Utility.logDebug("The number of individuals to be checked by HermiT: " + counter,
				"The number of correct answers: " + answerCounter);
		return answerCounter;
	}

	// Number of individual HermiT entailment calls made (for logging).
	private int counter = 0;

	/**
	 * Checks a single tuple: short-circuits via {@code tag} when possible,
	 * otherwise asks HermiT whether the instantiated query-graph assertions
	 * are entailed.
	 */
	@Override
	public boolean check(AnswerTuple answerTuple) {
		if (hermit == null) initialiseReasoner();
		if (tag != 0) return tag == 1;
		++counter;
		Timer t = new Timer();
		Map<Variable, Term> sub = answerTuple.getAssignment(answerVariable[1]);
		Set<OWLAxiom> toCheckAxioms = qGraph.getAssertions(sub);

//		for (OWLAxiom axiom: toCheckAxioms) System.out.println(axiom.toString());

		if (hermit.isEntailed(toCheckAxioms)) {
			Utility.logDebug("@TIME to check one tuple: " + t.duration());
			return true;
		}
		Utility.logDebug("@TIME to check one tuple: " + t.duration());
		return false;
	}

	/** Consistency as reported by HermiT (reasoner is built lazily). */
	@Override
	public boolean isConsistent() {
		if (hermit == null) initialiseReasoner();
		return hermit.isConsistent();
	}


	/** Logs the call count and disposes the underlying HermiT instance. */
	public void dispose() {
		Utility.logInfo("Hermit was called " + counter + " times.");
		if (hermit != null) hermit.dispose();
		hermit = null;
	}

	// Optional dependency graph enabling the top/bot shortcut; set by callers.
	private DependencyGraph dGraph = null;

	public void setDependencyGraph(DependencyGraph dGraph) {
		this.dGraph = dGraph;
	}

}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java new file mode 100644 index 0000000..3207ff1 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java | |||
| @@ -0,0 +1,366 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.light; | ||
| 2 | |||
| 3 | import java.util.Arrays; | ||
| 4 | import java.util.Collection; | ||
| 5 | import java.util.HashSet; | ||
| 6 | import java.util.Iterator; | ||
| 7 | import java.util.Set; | ||
| 8 | |||
| 9 | import org.semanticweb.HermiT.model.DLClause; | ||
| 10 | |||
| 11 | import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; | ||
| 12 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 13 | import uk.ac.ox.cs.pagoda.query.GapByStore4ID; | ||
| 14 | import uk.ac.ox.cs.pagoda.rules.DatalogProgram; | ||
| 15 | import uk.ac.ox.cs.pagoda.rules.Program; | ||
| 16 | import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; | ||
| 17 | import uk.ac.ox.cs.pagoda.util.Namespace; | ||
| 18 | import uk.ac.ox.cs.pagoda.util.Timer; | ||
| 19 | import uk.ac.ox.cs.pagoda.util.UFS; | ||
| 20 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 21 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | ||
| 22 | import uk.ac.ox.cs.JRDFox.store.DataStore; | ||
| 23 | import uk.ac.ox.cs.JRDFox.store.Parameters; | ||
| 24 | import uk.ac.ox.cs.JRDFox.store.TripleStatus; | ||
| 25 | import uk.ac.ox.cs.JRDFox.store.TupleIterator; | ||
| 26 | import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType; | ||
| 27 | |||
| 28 | public class BasicQueryEngine extends RDFoxQueryEngine { | ||
| 29 | |||
	// The underlying RDFox data store holding all imported/derived triples.
	protected DataStore store;
	// Query-compilation parameters shared by all evaluations on this engine.
	protected Parameters parameters = new Parameters();

	public BasicQueryEngine(String name) {
		super(name);
		store = RDFoxQueryEngine.createDataStore();
		// Collect all answers in the root node and use bushy join plans.
		parameters.m_allAnswersInRoot = true;
		parameters.m_useBushy = true;
	}
| 39 | |||
| 40 | public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) { | ||
| 41 | if (gap != null) { | ||
| 42 | materialise("lower program", dProgram.getLower().toString()); | ||
| 43 | String program = dProgram.getUpper().toString(); | ||
| 44 | try { | ||
| 45 | gap.compile(program); | ||
| 46 | gap.addBackTo(); | ||
| 47 | getDataStore().clearRulesAndMakeFactsExplicit(); | ||
| 48 | } catch (JRDFStoreException e) { | ||
| 49 | e.printStackTrace(); | ||
| 50 | gap.clear(); | ||
| 51 | } finally { | ||
| 52 | } | ||
| 53 | } | ||
| 54 | else | ||
| 55 | materialise("upper program", dProgram.getUpper().toString()); | ||
| 56 | } | ||
| 57 | |||
| 58 | public int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap) { | ||
| 59 | if (gap != null) { | ||
| 60 | materialise("lower program", dProgram.getLower().toString()); | ||
| 61 | String program = dProgram.getUpper().toString(); | ||
| 62 | try { | ||
| 63 | gap.compile(program); | ||
| 64 | gap.addBackTo(); | ||
| 65 | getDataStore().clearRulesAndMakeFactsExplicit(); | ||
| 66 | } catch (JRDFStoreException e) { | ||
| 67 | e.printStackTrace(); | ||
| 68 | } finally { | ||
| 69 | gap.clear(); | ||
| 70 | } | ||
| 71 | } | ||
| 72 | else | ||
| 73 | materialise("upper program", dProgram.getUpper().toString()); | ||
| 74 | |||
| 75 | return 1; | ||
| 76 | } | ||
| 77 | |||
	@Override
	public AnswerTuples evaluate(String queryText) {
		// Delegate using the first set of answer variables parsed from the query.
		return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0]);
	}
| 82 | |||
| 83 | @Override | ||
| 84 | public AnswerTuples evaluate(String queryText, String[] answerVars) { | ||
| 85 | TupleIterator tupleIterator; | ||
| 86 | try { | ||
| 87 | tupleIterator = store.compileQuery(queryText.replace("_:", "?"), prefixes, parameters); | ||
| 88 | } catch (JRDFStoreException e) { | ||
| 89 | e.printStackTrace(); | ||
| 90 | return null; | ||
| 91 | } | ||
| 92 | return new RDFoxAnswerTuples(answerVars, tupleIterator); | ||
| 93 | } | ||
| 94 | |||
	// Exposes the underlying RDFox data store.
	@Override
	public DataStore getDataStore() {
		return store;
	}
| 99 | |||
	// Releases the native resources held by the RDFox store.
	@Override
	public void dispose() {
		store.dispose();
	}
| 104 | |||
	// Dumps all rdf:type assertions (individual, class) to the given file.
	protected void outputClassAssertions(String filename) {
		TupleIterator allTuples = null;
		boolean redirect = false;
		try {
			allTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Z }", prefixes, parameters);
			// Redirect stdout only after the query compiled, so errors don't land in the file.
			redirect = Utility.redirectCurrentOut(filename);
			for (long multi = allTuples.open(); multi != 0; multi = allTuples.getNext())
				System.out.println(RDFoxTripleManager.getQuotedTerm(allTuples.getResource(0)) + " " + RDFoxTripleManager.getQuotedTerm(allTuples.getResource(1)));
		} catch (JRDFStoreException e) {
			e.printStackTrace();
		} finally {
			// Restore stdout before disposing the iterator.
			if (redirect) Utility.closeCurrentOut();
			if (allTuples != null) allTuples.dispose();
		}
	}
| 120 | |||
	// Writes all (subject, object) pairs of the given property to a file.
	public void outputInstance4BinaryPredicate(String iri, String filename) {
		Utility.redirectCurrentOut(filename);
		outputInstance4BinaryPredicate(iri);
		Utility.closeCurrentOut();
	}
| 126 | |||
	// Prints all (subject, object) pairs of the given property to stdout.
	public void outputInstance4BinaryPredicate(String iri) {
		outputAnswers("select ?x ?y where { ?x <" + iri + "> ?y . }");
	}
| 130 | |||
	/**
	 * Writes, to the given file, one line per predicate with its total
	 * instance count (multiplicities included): first every class used in an
	 * rdf:type assertion, then every property used in a triple. Lines are
	 * sorted by DLPredicateComparator before output.
	 */
	public void outputInstanceNumbers(String filename) {
		TupleIterator predicateTuples = null;
		TupleIterator instanceTuples;
		Set<String> number = new HashSet<String>();
		String predicate;
		try {
			// Pass 1: count instances of each class (rdf:type object).
			predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Y }", prefixes, parameters);
			for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) {
				predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0));
				instanceTuples = null;
				try {
					instanceTuples = getDataStore().compileQuery("SELECT ?X WHERE { ?X <" + Namespace.RDF_TYPE + "> " + predicate + " }", prefixes, parameters);
					long totalCount = 0;
					for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) {
						totalCount += instanceTuples.getMultiplicity();
					}
					number.add(predicate + " * " + totalCount);
				} finally {
					if (instanceTuples != null) instanceTuples.dispose();
				}
			}

			predicateTuples.dispose();

			// Pass 2: count triples for each (binary) predicate.
			predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X ?Y ?Z }", prefixes, parameters);
			for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) {
				predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0));
				instanceTuples = null;
				try {
					instanceTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X " + predicate + " ?Z }", prefixes, parameters);
					;
					long totalCount = 0;
					for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext())
						totalCount += instanceTuples.getMultiplicity();
					number.add(predicate + " * " + totalCount);
				} finally {
					if (instanceTuples != null) instanceTuples.dispose();
				}
			}

		} catch (JRDFStoreException e) {
			e.printStackTrace();
		} finally {
			if (predicateTuples != null) predicateTuples.dispose();
		}

		Utility.redirectCurrentOut(filename);
		String[] ordered = number.toArray(new String[0]);
		Arrays.sort(ordered, new DLPredicateComparator());
		for (String line: ordered) System.out.println(line);
		Utility.closeCurrentOut();

	}
| 184 | |||
	// Evaluates the query over EDB+IDB triples but reports only IDB (derived)
	// answers; the returned iterator is already opened.
	public TupleIterator internal_evaluateAgainstIDBs(String queryText) throws JRDFStoreException {
		TupleIterator iter = store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB);
		iter.open();
		return iter;
	}
| 190 | |||
	// Compiles and opens a query; the caller is responsible for disposing
	// the returned iterator.
	public TupleIterator internal_evaluate(String queryText) throws JRDFStoreException {
		TupleIterator iter = store.compileQuery(queryText, prefixes, parameters);
		iter.open();
		return iter;
	}
| 196 | |||
	// Toggles equality expansion for subsequent query compilations.
	public void setExpandEquality(boolean flag) {
		parameters.m_expandEquality = flag;
	}
| 200 | |||
| 201 | public TupleIterator internal_evaluateNotExpanded(String queryText) throws JRDFStoreException { | ||
| 202 | parameters.m_expandEquality = false; | ||
| 203 | TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); | ||
| 204 | iter.open(); | ||
| 205 | parameters.m_expandEquality = true; | ||
| 206 | return iter; | ||
| 207 | } | ||
| 208 | |||
| 209 | |||
	// Dispatches to the IDB-only evaluation when running incrementally,
	// otherwise to the plain evaluation.
	public TupleIterator internal_evaluate(String queryText, boolean incrementally) throws JRDFStoreException {
		return incrementally ? internal_evaluateAgainstIDBs(queryText) : internal_evaluate(queryText);
	}
| 213 | |||
| 214 | Set<DLClause> materialisedRules = new HashSet<DLClause>(); | ||
| 215 | |||
| 216 | public String getUnusedRules(Collection<DLClause> clauses, boolean toUpdate) { | ||
| 217 | DLClause clause; | ||
| 218 | for (Iterator<DLClause> iter = clauses.iterator(); iter.hasNext(); ) { | ||
| 219 | if (materialisedRules.contains(clause = iter.next())) | ||
| 220 | iter.remove(); | ||
| 221 | else if (toUpdate) materialisedRules.add(clause); | ||
| 222 | } | ||
| 223 | |||
| 224 | if (clauses.isEmpty()) return null; | ||
| 225 | |||
| 226 | return Program.toString(clauses); | ||
| 227 | } | ||
| 228 | |||
	// Prints every clause materialised so far to stdout.
	public void outputMaterialisedRules() {
		System.out.println(DLClauseHelper.toString(materialisedRules));
	}
| 232 | |||
| 233 | public void outputAnswers(String query) { | ||
| 234 | TupleIterator iter = null; | ||
| 235 | try { | ||
| 236 | iter = internal_evaluate(query); | ||
| 237 | System.out.println(query); | ||
| 238 | int arity = iter.getArity(); | ||
| 239 | for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { | ||
| 240 | for (int i = 0; i < arity; ++i) | ||
| 241 | System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); | ||
| 242 | System.out.println(); | ||
| 243 | } | ||
| 244 | } catch (JRDFStoreException e) { | ||
| 245 | e.printStackTrace(); | ||
| 246 | } finally { | ||
| 247 | if (iter != null) iter.dispose(); | ||
| 248 | } | ||
| 249 | } | ||
| 250 | |||
| 251 | public void outputInstance4UnaryPredicate(String iri) { | ||
| 252 | outputAnswers("select ?x where { ?x " | ||
| 253 | + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <" | ||
| 254 | + iri | ||
| 255 | + "> .}"); | ||
| 256 | } | ||
| 257 | |||
| 258 | public void outputSubjects(String p, String o) { | ||
| 259 | outputAnswers("select x where { ?x <" + p + "> <" + o + "> . }"); | ||
| 260 | } | ||
| 261 | |||
	// Prints all objects ?x such that the triple (s, p, ?x) holds.
	public void outputObjects(String s, String p) {
		outputAnswers("select ?x where { <" + s + "> <" + p + "> ?x . }");
	}
| 265 | |||
| 266 | public void outputIDBFacts() { | ||
| 267 | TupleIterator iter = null; | ||
| 268 | try { | ||
| 269 | iter = internal_evaluateAgainstIDBs("select distict ?x ?y ?z where { ?x ?y ?z }"); | ||
| 270 | for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { | ||
| 271 | for (int i = 0; i < 3; ++i) | ||
| 272 | System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); | ||
| 273 | System.out.println(); | ||
| 274 | } | ||
| 275 | } catch (JRDFStoreException e) { | ||
| 276 | // TODO Auto-generated catch block | ||
| 277 | e.printStackTrace(); | ||
| 278 | } finally { | ||
| 279 | if (iter != null) iter.dispose(); | ||
| 280 | } | ||
| 281 | |||
| 282 | } | ||
| 283 | |||
| 284 | public void outputType4Individual(String iri) { | ||
| 285 | outputAnswers("select ?z where { <" + iri + "> " + Namespace.RDF_TYPE_QUOTED + " ?z }"); | ||
| 286 | } | ||
| 287 | |||
| 288 | public int getSameAsNumber() { | ||
| 289 | TupleIterator iter = null; | ||
| 290 | int counter = 0; | ||
| 291 | try { | ||
| 292 | iter = internal_evaluate("select ?x ?y where {?x " + Namespace.EQUALITY_QUOTED + " ?y . }"); | ||
| 293 | for (long multi = iter.open(); multi != 0; multi = iter.getNext()) | ||
| 294 | if (iter.getResourceID(0) != iter.getResourceID(1)) | ||
| 295 | ++counter; | ||
| 296 | } catch (JRDFStoreException e) { | ||
| 297 | e.printStackTrace(); | ||
| 298 | } finally { | ||
| 299 | if (iter != null) iter.dispose(); | ||
| 300 | } | ||
| 301 | return counter; | ||
| 302 | } | ||
| 303 | |||
| 304 | private UFS<String> equalityGroups = null; | ||
| 305 | |||
| 306 | public UFS<String> getEqualityGroups() { | ||
| 307 | if (equalityGroups != null) return equalityGroups; | ||
| 308 | |||
| 309 | equalityGroups = new UFS<String>(); | ||
| 310 | |||
| 311 | TupleIterator answers = null; | ||
| 312 | try { | ||
| 313 | Timer t = new Timer(); | ||
| 314 | answers = internal_evaluate("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + "?z . }"); | ||
| 315 | for (long multi = answers.open(); multi != 0; multi = answers.getNext()) { | ||
| 316 | if (answers.getResourceID(0) != answers.getResourceID(1)) | ||
| 317 | equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm); | ||
| 318 | } | ||
| 319 | Utility.logInfo("@Time to group individuals by equality: " + t.duration()); | ||
| 320 | } catch (JRDFStoreException e) { | ||
| 321 | e.printStackTrace(); | ||
| 322 | } finally { | ||
| 323 | if (answers != null) answers.dispose(); | ||
| 324 | } | ||
| 325 | |||
| 326 | return equalityGroups; | ||
| 327 | } | ||
| 328 | |||
	/**
	 * Empties the pending triple collection and removes all rules from the
	 * store, making every derived fact explicit.
	 *
	 * @param collection scheduled triples; cleared as a side effect
	 */
	public void clearRulesAndIDBFacts(Collection<int[]> collection) {
		// performDeletion(collection);
		collection.clear();
		try {
			store.clearRulesAndMakeFactsExplicit();
		} catch (JRDFStoreException e) {
			e.printStackTrace();
		}
	}
| 338 | |||
| 339 | @SuppressWarnings("unused") | ||
| 340 | private void performDeletion(Collection<int[]> collection) { | ||
| 341 | Utility.logInfo("Remove all rules, idb facts and added staff..."); | ||
| 342 | Timer timer = new Timer(); | ||
| 343 | TupleIterator iter = null; | ||
| 344 | try { | ||
| 345 | UpdateType ut = UpdateType.ScheduleForDeletion; | ||
| 346 | for (int[] t: collection) | ||
| 347 | store.addTriplesByResourceIDs(t, ut); | ||
| 348 | |||
| 349 | iter = internal_evaluateAgainstIDBs("select ?x ?y ?z where { ?x ?y ?z . }"); | ||
| 350 | for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { | ||
| 351 | int[] triple = new int[3]; | ||
| 352 | for (int i = 0; i < 3; ++i) | ||
| 353 | triple[i] = iter.getResourceID(i); | ||
| 354 | store.addTriplesByResourceIDs(triple, ut); | ||
| 355 | } | ||
| 356 | store.applyReasoning(true); | ||
| 357 | } catch (JRDFStoreException e) { | ||
| 358 | e.printStackTrace(); | ||
| 359 | } finally { | ||
| 360 | if (iter != null) iter.dispose(); | ||
| 361 | } | ||
| 362 | Utility.logInfo("Time for deletion: " + timer.duration()); | ||
| 363 | } | ||
| 364 | |||
| 365 | |||
| 366 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java new file mode 100644 index 0000000..c22902c --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java | |||
| @@ -0,0 +1,24 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.light; | ||
| 2 | |||
| 3 | import java.util.Comparator; | ||
| 4 | |||
| 5 | import uk.ac.ox.cs.pagoda.multistage.Normalisation; | ||
| 6 | import uk.ac.ox.cs.pagoda.rules.OverApproxExist; | ||
| 7 | |||
| 8 | public class DLPredicateComparator implements Comparator<String> { | ||
| 9 | |||
| 10 | @Override | ||
| 11 | public int compare(String arg0, String arg1) { | ||
| 12 | int ret = type(arg0) - type(arg1); | ||
| 13 | if (ret != 0) return ret; | ||
| 14 | |||
| 15 | return arg0.compareTo(arg1); | ||
| 16 | } | ||
| 17 | |||
| 18 | private int type(String p) { | ||
| 19 | if (p.contains(OverApproxExist.negativeSuffix)) return 1; | ||
| 20 | if (p.contains(Normalisation.auxiliaryConceptPrefix)) return 2; | ||
| 21 | else return 0; | ||
| 22 | } | ||
| 23 | |||
| 24 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java new file mode 100644 index 0000000..03d2b67 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java | |||
| @@ -0,0 +1,95 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.light; | ||
| 2 | |||
| 3 | import java.io.FileNotFoundException; | ||
| 4 | import java.io.IOException; | ||
| 5 | import java.util.LinkedList; | ||
| 6 | import java.util.Map.Entry; | ||
| 7 | |||
| 8 | import org.semanticweb.HermiT.model.DLClause; | ||
| 9 | import org.semanticweb.karma2.exception.IllegalInputQueryException; | ||
| 10 | import org.semanticweb.karma2.model.ConjunctiveQuery; | ||
| 11 | import org.semanticweb.karma2.model.cqparser.ConjunctiveQueryParser; | ||
| 12 | import uk.ac.ox.cs.pagoda.MyPrefixes; | ||
| 13 | import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; | ||
| 14 | import uk.ac.ox.cs.pagoda.hermit.RuleHelper; | ||
| 15 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 16 | |||
| 17 | public class KarmaQuery { | ||
| 18 | |||
| 19 | StringBuffer queryBuffer; | ||
| 20 | |||
| 21 | public KarmaQuery(String queryText) { | ||
| 22 | LinkedList<String> answerVariables = new LinkedList<String>(); | ||
| 23 | DLClause clause = DLClauseHelper.getQuery(queryText, answerVariables); | ||
| 24 | String clauseText = RuleHelper.getText(clause); | ||
| 25 | // clauseText = RuleHelper.abbreviateIRI(clauseText).replace(":-", "<-"); | ||
| 26 | clauseText = clauseText.replace(":-", "<-"); | ||
| 27 | queryBuffer = new StringBuffer(); | ||
| 28 | |||
| 29 | clauseText = expandIRI4Arguments(clauseText); | ||
| 30 | |||
| 31 | for (Entry<String, String> entry : MyPrefixes.PAGOdAPrefixes.getPrefixIRIsByPrefixName().entrySet()) | ||
| 32 | if (clauseText.contains(entry.getKey())) { | ||
| 33 | if (queryBuffer.length() > 0) queryBuffer.append(',').append(Utility.LINE_SEPARATOR); | ||
| 34 | queryBuffer.append("prefix ").append(entry.getKey()).append(" <").append(entry.getValue()).append(">"); | ||
| 35 | } | ||
| 36 | if (queryBuffer.length() > 0) queryBuffer.append(Utility.LINE_SEPARATOR); | ||
| 37 | |||
| 38 | queryBuffer.append("p("); | ||
| 39 | boolean first = true; | ||
| 40 | for (String var: answerVariables) { | ||
| 41 | if (first) first = false; | ||
| 42 | else queryBuffer.append(","); | ||
| 43 | |||
| 44 | queryBuffer.append("?").append(var); | ||
| 45 | } | ||
| 46 | queryBuffer.append(")").append(clauseText.substring(0, clauseText.length() - 1)); | ||
| 47 | } | ||
| 48 | |||
| 49 | private String expandIRI4Arguments(String clauseText) { | ||
| 50 | int leftIndex = clauseText.indexOf('('), rightIndex = clauseText.indexOf(')', leftIndex + 1); | ||
| 51 | String argsText, newArgsText; | ||
| 52 | while (leftIndex != -1) { | ||
| 53 | argsText = clauseText.substring(leftIndex + 1, rightIndex); | ||
| 54 | newArgsText = MyPrefixes.PAGOdAPrefixes.expandText(argsText); | ||
| 55 | clauseText = clauseText.replace(argsText, newArgsText); | ||
| 56 | |||
| 57 | rightIndex += newArgsText.length() - argsText.length(); | ||
| 58 | leftIndex = clauseText.indexOf('(', rightIndex + 1); | ||
| 59 | rightIndex = clauseText.indexOf(')', leftIndex + 1); | ||
| 60 | } | ||
| 61 | |||
| 62 | return clauseText; | ||
| 63 | } | ||
| 64 | |||
| 65 | public ConjunctiveQuery getConjunctiveQuery() { | ||
| 66 | ConjunctiveQuery cq = null; | ||
| 67 | try { | ||
| 68 | cq = new ConjunctiveQueryParser(toString()).parse(); | ||
| 69 | } catch (FileNotFoundException e) { | ||
| 70 | // TODO Auto-generated catch block | ||
| 71 | e.printStackTrace(); | ||
| 72 | } catch (IllegalInputQueryException e) { | ||
| 73 | // TODO Auto-generated catch block | ||
| 74 | e.printStackTrace(); | ||
| 75 | } catch (IOException e) { | ||
| 76 | // TODO Auto-generated catch block | ||
| 77 | e.printStackTrace(); | ||
| 78 | } catch (Exception e) { | ||
| 79 | Utility.logDebug("The query cannot be properly handled by KARMA."); | ||
| 80 | return null; | ||
| 81 | } | ||
| 82 | return cq; | ||
| 83 | } | ||
| 84 | |||
| 85 | @Override | ||
| 86 | public String toString() { | ||
| 87 | return queryBuffer.toString(); | ||
| 88 | } | ||
| 89 | |||
| 90 | static String sample = "prefix P0: <http://swat.cse.lehigh.edu/onto/univ-bench.owl#>, " + | ||
| 91 | "prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>, " + | ||
| 92 | "prefix owl: <http://www.w3.org/2002/07/owl#>" + | ||
| 93 | "q(?0) <- owl:Thing(?0), P0:Person(?0)"; | ||
| 94 | |||
| 95 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java new file mode 100644 index 0000000..f70dde9 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java | |||
| @@ -0,0 +1,98 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.light; | ||
| 2 | |||
| 3 | import java.io.File; | ||
| 4 | import java.io.FileNotFoundException; | ||
| 5 | import java.util.*; | ||
| 6 | |||
| 7 | import org.semanticweb.karma2.*; | ||
| 8 | import org.semanticweb.karma2.clausifier.OntologyProcesser; | ||
| 9 | import org.semanticweb.karma2.exception.IllegalInputOntologyException; | ||
| 10 | import org.semanticweb.karma2.model.ConjunctiveQuery; | ||
| 11 | import org.semanticweb.owlapi.model.OWLOntology; | ||
| 12 | |||
| 13 | import uk.ac.ox.cs.pagoda.query.*; | ||
| 14 | import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; | ||
| 15 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 16 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | ||
| 17 | import uk.ac.ox.cs.JRDFox.store.DataStore; | ||
| 18 | |||
| 19 | public class KarmaQueryEngine extends RDFoxQueryEngine { | ||
| 20 | |||
| 21 | private MyKarma reasoner = null; | ||
| 22 | |||
| 23 | String karmaDataFile = null, karmaRuleFile = null; | ||
| 24 | |||
| 25 | public KarmaQueryEngine(String name) { | ||
| 26 | super(name); | ||
| 27 | |||
| 28 | // int Base = 1 << 6; | ||
| 29 | // int index = (new Random().nextInt() % Base + Base) % Base; | ||
| 30 | // karmaDataFile = "karma_data" + index + ".ttl"; | ||
| 31 | // karmaRuleFile = "karma_rule" + index + ".dlog"; | ||
| 32 | karmaDataFile = Utility.TempDirectory + "karma_data.ttl"; | ||
| 33 | karmaRuleFile = Utility.TempDirectory + "karma_rule.dlog"; | ||
| 34 | |||
| 35 | reasoner = new MyKarma(); | ||
| 36 | } | ||
| 37 | |||
| 38 | public MyKarma getReasoner() { | ||
| 39 | return reasoner; | ||
| 40 | } | ||
| 41 | |||
| 42 | public void processOntology(OWLOntology elhoOntology) { | ||
| 43 | try { | ||
| 44 | OntologyProcesser.transformOntology(elhoOntology, new File(karmaDataFile), new File(karmaRuleFile)); | ||
| 45 | } catch (IllegalInputOntologyException e) { | ||
| 46 | e.printStackTrace(); | ||
| 47 | } | ||
| 48 | } | ||
| 49 | |||
| 50 | @Override | ||
| 51 | public void dispose() { | ||
| 52 | reasoner.dispose(); | ||
| 53 | } | ||
| 54 | |||
| 55 | @Override | ||
| 56 | public AnswerTuples evaluate(String queryText) { | ||
| 57 | return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], null); | ||
| 58 | } | ||
| 59 | |||
| 60 | @Override | ||
| 61 | public AnswerTuples evaluate(String queryText, String[] answerVars) { | ||
| 62 | return evaluate(queryText, answerVars, null); | ||
| 63 | } | ||
| 64 | |||
| 65 | public AnswerTuples evaluate(String queryText, AnswerTuples soundAnswerTuples) { | ||
| 66 | return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], soundAnswerTuples); | ||
| 67 | } | ||
| 68 | |||
| 69 | public AnswerTuples evaluate(String queryText, String[] answerVars, AnswerTuples soundAnswerTuples) { | ||
| 70 | KarmaQuery karmaQuery = new KarmaQuery(queryText.replace("_:", "?")); | ||
| 71 | reasoner.setConcurrence(false); | ||
| 72 | ConjunctiveQuery cq = karmaQuery.getConjunctiveQuery(); | ||
| 73 | if (cq == null) return null; | ||
| 74 | Set<AnswerTuple> answers = reasoner.answerCQ(cq, soundAnswerTuples, !queryText.contains("_:")); | ||
| 75 | return new AnswerTuplesImp(answerVars, answers); | ||
| 76 | } | ||
| 77 | |||
| 78 | @Override | ||
| 79 | public DataStore getDataStore() { | ||
| 80 | return reasoner.getStore(); | ||
| 81 | } | ||
| 82 | |||
| 83 | public void initialiseKarma() { | ||
| 84 | try { | ||
| 85 | reasoner.initializeData(new File(karmaDataFile)); | ||
| 86 | reasoner.materialise(new File(karmaRuleFile)); | ||
| 87 | |||
| 88 | File tmp; | ||
| 89 | if (karmaDataFile != null && ((tmp = new File(karmaDataFile)).exists())) tmp.delete(); | ||
| 90 | if (karmaRuleFile != null && ((tmp = new File(karmaRuleFile)).exists())) tmp.delete(); | ||
| 91 | } catch (FileNotFoundException e) { | ||
| 92 | e.printStackTrace(); | ||
| 93 | } catch (JRDFStoreException e) { | ||
| 94 | e.printStackTrace(); | ||
| 95 | } | ||
| 96 | } | ||
| 97 | |||
| 98 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java new file mode 100644 index 0000000..dd71809 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java | |||
| @@ -0,0 +1,100 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.light; | ||
| 2 | |||
| 3 | import org.semanticweb.HermiT.model.Constant; | ||
| 4 | import org.semanticweb.HermiT.model.Individual; | ||
| 5 | import org.semanticweb.HermiT.model.Term; | ||
| 6 | |||
| 7 | import uk.ac.ox.cs.pagoda.query.AnswerTuple; | ||
| 8 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 9 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 10 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | ||
| 11 | import uk.ac.ox.cs.JRDFox.model.GroundTerm; | ||
| 12 | import uk.ac.ox.cs.JRDFox.store.TupleIterator; | ||
| 13 | |||
| 14 | public class RDFoxAnswerTuples implements AnswerTuples { | ||
| 15 | |||
| 16 | long multi; | ||
| 17 | TupleIterator m_iter; | ||
| 18 | String[] m_answerVars; | ||
| 19 | |||
| 20 | public RDFoxAnswerTuples(String[] answerVars, TupleIterator iter) { | ||
| 21 | m_answerVars = answerVars; | ||
| 22 | m_iter = iter; | ||
| 23 | reset(); | ||
| 24 | } | ||
| 25 | |||
| 26 | @Override | ||
| 27 | public boolean isValid() { | ||
| 28 | return multi != 0; | ||
| 29 | } | ||
| 30 | |||
| 31 | @Override | ||
| 32 | public int getArity() { | ||
| 33 | try { | ||
| 34 | return m_iter.getArity(); | ||
| 35 | } catch (JRDFStoreException e) { | ||
| 36 | e.printStackTrace(); | ||
| 37 | return -1; | ||
| 38 | } | ||
| 39 | } | ||
| 40 | |||
| 41 | @Override | ||
| 42 | public void moveNext() { | ||
| 43 | try { | ||
| 44 | multi = m_iter.getNext(); | ||
| 45 | } catch (JRDFStoreException e) { | ||
| 46 | e.printStackTrace(); | ||
| 47 | } | ||
| 48 | } | ||
| 49 | |||
| 50 | @Override | ||
| 51 | public void dispose() { | ||
| 52 | m_iter.dispose(); | ||
| 53 | } | ||
| 54 | |||
| 55 | protected void finalize() { | ||
| 56 | m_iter.dispose(); | ||
| 57 | } | ||
| 58 | |||
| 59 | @Override | ||
| 60 | public AnswerTuple getTuple() { | ||
| 61 | return new AnswerTuple(m_iter, m_answerVars.length); | ||
| 62 | } | ||
| 63 | |||
| 64 | @Override | ||
| 65 | public void reset() { | ||
| 66 | try { | ||
| 67 | multi = m_iter.open(); | ||
| 68 | } catch (JRDFStoreException e) { | ||
| 69 | e.printStackTrace(); | ||
| 70 | } | ||
| 71 | } | ||
| 72 | |||
| 73 | @Override | ||
| 74 | public boolean contains(AnswerTuple t) { | ||
| 75 | Utility.logError("Unsupported operation in RDFoxAnswerTuples"); | ||
| 76 | return false; | ||
| 77 | } | ||
| 78 | |||
| 79 | @Override | ||
| 80 | public void remove() { | ||
| 81 | Utility.logError("Unsupported operation in RDFoxAnswerTuples"); | ||
| 82 | } | ||
| 83 | |||
| 84 | @Override | ||
| 85 | public String[] getAnswerVariables() { | ||
| 86 | return m_answerVars; | ||
| 87 | } | ||
| 88 | |||
| 89 | public static Term getHermitTerm(GroundTerm t) { | ||
| 90 | if (t instanceof uk.ac.ox.cs.JRDFox.model.Individual) { | ||
| 91 | uk.ac.ox.cs.JRDFox.model.Individual individual = (uk.ac.ox.cs.JRDFox.model.Individual) t; | ||
| 92 | return Individual.create(individual.getIRI()); | ||
| 93 | } | ||
| 94 | else { | ||
| 95 | uk.ac.ox.cs.JRDFox.model.Literal literal = ((uk.ac.ox.cs.JRDFox.model.Literal) t); | ||
| 96 | return Constant.create(literal.getLexicalForm(), literal.getDatatype().getIRI()); | ||
| 97 | } | ||
| 98 | } | ||
| 99 | |||
| 100 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java new file mode 100644 index 0000000..30771ab --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java | |||
| @@ -0,0 +1,110 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.light; | ||
| 2 | |||
| 3 | import java.io.File; | ||
| 4 | import java.util.Collection; | ||
| 5 | |||
| 6 | import uk.ac.ox.cs.pagoda.MyPrefixes; | ||
| 7 | import uk.ac.ox.cs.pagoda.query.AnswerTuples; | ||
| 8 | import uk.ac.ox.cs.pagoda.reasoner.QueryEngine; | ||
| 9 | import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; | ||
| 10 | import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter; | ||
| 11 | import uk.ac.ox.cs.pagoda.util.Timer; | ||
| 12 | import uk.ac.ox.cs.pagoda.util.Utility; | ||
| 13 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | ||
| 14 | import uk.ac.ox.cs.JRDFox.Prefixes; | ||
| 15 | import uk.ac.ox.cs.JRDFox.store.DataStore; | ||
| 16 | import uk.ac.ox.cs.JRDFox.store.DataStore.StoreType; | ||
| 17 | |||
| 18 | public abstract class RDFoxQueryEngine implements QueryEngine { | ||
| 19 | |||
| 20 | public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2; | ||
| 21 | |||
| 22 | protected String name; | ||
| 23 | protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes(); | ||
| 24 | |||
| 25 | public RDFoxQueryEngine(String name) { | ||
| 26 | this.name = name; | ||
| 27 | } | ||
| 28 | |||
| 29 | public abstract DataStore getDataStore(); | ||
| 30 | |||
| 31 | public abstract void dispose(); | ||
| 32 | |||
| 33 | public void importRDFData(String fileName, String importedFile) { | ||
| 34 | if (importedFile == null || importedFile.isEmpty()) return ; | ||
| 35 | Timer t = new Timer(); | ||
| 36 | DataStore store = getDataStore(); | ||
| 37 | try { | ||
| 38 | long oldTripleCount = store.getTriplesCount(), tripleCount; | ||
| 39 | for (String file: importedFile.split(QueryReasoner.ImportDataFileSeparator)) | ||
| 40 | store.importTurtleFile(new File(file), prefixes); | ||
| 41 | tripleCount = store.getTriplesCount(); | ||
| 42 | Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); | ||
| 43 | store.clearRulesAndMakeFactsExplicit(); | ||
| 44 | } catch (JRDFStoreException e) { | ||
| 45 | e.printStackTrace(); | ||
| 46 | } | ||
| 47 | Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds."); | ||
| 48 | } | ||
| 49 | |||
| 50 | public void materialise(String programName, String programText) { | ||
| 51 | if (programText == null) return ; | ||
| 52 | Timer t = new Timer(); | ||
| 53 | DataStore store = getDataStore(); | ||
| 54 | try { | ||
| 55 | long oldTripleCount = store.getTriplesCount(), tripleCount; | ||
| 56 | // store.addRules(new String[] {programText}); | ||
| 57 | store.importRules(programText); | ||
| 58 | store.applyReasoning(); | ||
| 59 | tripleCount = store.getTriplesCount(); | ||
| 60 | Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); | ||
| 61 | store.clearRulesAndMakeFactsExplicit(); | ||
| 62 | } catch (JRDFStoreException e) { | ||
| 63 | e.printStackTrace(); | ||
| 64 | } | ||
| 65 | Utility.logDebug(name + " store finished the materialisation of " + programName + " in " + t.duration() + " seconds."); | ||
| 66 | } | ||
| 67 | |||
| 68 | @Override | ||
| 69 | public void evaluate(Collection<String> queryTexts, String answerFile) { | ||
| 70 | if (queryTexts == null) | ||
| 71 | return ; | ||
| 72 | |||
| 73 | int queryID = 0; | ||
| 74 | AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile); | ||
| 75 | AnswerTuples answerTuples; | ||
| 76 | Timer t = new Timer(); | ||
| 77 | try { | ||
| 78 | for (String query: queryTexts) { | ||
| 79 | t.reset(); | ||
| 80 | answerTuples = null; | ||
| 81 | try { | ||
| 82 | answerTuples = evaluate(query); | ||
| 83 | Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration()); | ||
| 84 | answerWriter.write(answerTuples.getAnswerVariables(), answerTuples); | ||
| 85 | } finally { | ||
| 86 | if (answerTuples != null) answerTuples.dispose(); | ||
| 87 | } | ||
| 88 | } | ||
| 89 | } finally { | ||
| 90 | answerWriter.close(); | ||
| 91 | } | ||
| 92 | |||
| 93 | Utility.logDebug("done computing query answers by RDFox."); | ||
| 94 | |||
| 95 | } | ||
| 96 | |||
| 97 | public static DataStore createDataStore() { | ||
| 98 | DataStore instance = null; | ||
| 99 | try { | ||
| 100 | // instance = new DataStore("par-head-n"); | ||
| 101 | instance = new DataStore(StoreType.NarrowParallelHead); | ||
| 102 | instance.setNumberOfThreads(matNoOfThreads); | ||
| 103 | instance.initialize(); | ||
| 104 | } catch (JRDFStoreException e) { | ||
| 105 | e.printStackTrace(); | ||
| 106 | } | ||
| 107 | return instance; | ||
| 108 | } | ||
| 109 | |||
| 110 | } | ||
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java new file mode 100644 index 0000000..2280b12 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java | |||
| @@ -0,0 +1,249 @@ | |||
| 1 | package uk.ac.ox.cs.pagoda.reasoner.light; | ||
| 2 | |||
| 3 | import java.util.Collection; | ||
| 4 | import java.util.HashMap; | ||
| 5 | import java.util.HashSet; | ||
| 6 | import java.util.LinkedList; | ||
| 7 | import java.util.Map; | ||
| 8 | import java.util.Queue; | ||
| 9 | import java.util.Set; | ||
| 10 | |||
| 11 | import org.semanticweb.HermiT.model.AnnotatedEquality; | ||
| 12 | import org.semanticweb.HermiT.model.Atom; | ||
| 13 | import org.semanticweb.HermiT.model.AtomicConcept; | ||
| 14 | import org.semanticweb.HermiT.model.AtomicRole; | ||
| 15 | import org.semanticweb.HermiT.model.Constant; | ||
| 16 | import org.semanticweb.HermiT.model.DLPredicate; | ||
| 17 | import org.semanticweb.HermiT.model.Equality; | ||
| 18 | import org.semanticweb.HermiT.model.Individual; | ||
| 19 | import org.semanticweb.HermiT.model.Inequality; | ||
| 20 | import org.semanticweb.HermiT.model.Term; | ||
| 21 | import org.semanticweb.HermiT.model.Variable; | ||
| 22 | |||
| 23 | import uk.ac.ox.cs.pagoda.owl.OWLHelper; | ||
| 24 | import uk.ac.ox.cs.pagoda.util.Namespace; | ||
| 25 | import uk.ac.ox.cs.JRDFox.JRDFStoreException; | ||
| 26 | import uk.ac.ox.cs.JRDFox.model.GroundTerm; | ||
| 27 | import uk.ac.ox.cs.JRDFox.store.DataStore; | ||
| 28 | import uk.ac.ox.cs.JRDFox.model.Datatype; | ||
| 29 | import uk.ac.ox.cs.JRDFox.store.Dictionary; | ||
| 30 | import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType; | ||
| 31 | import uk.ac.ox.cs.JRDFox.store.Resource; | ||
| 32 | |||
| 33 | public class RDFoxTripleManager { | ||
| 34 | |||
| 35 | UpdateType m_incrementally; | ||
| 36 | // boolean m_incrementally; | ||
| 37 | |||
| 38 | DataStore m_store; | ||
| 39 | Dictionary m_dict; | ||
| 40 | Set<Atom> triplesByTerm = new HashSet<Atom>(); | ||
| 41 | |||
	/**
	 * Creates a triple manager over the given store.
	 *
	 * @param incrementally when true, additions are scheduled for incremental
	 *        reasoning (UpdateType.ScheduleForAddition) rather than added directly.
	 */
	public RDFoxTripleManager(DataStore store, boolean incrementally) {
		m_store = store;
		// m_incrementally = incrementally;
		if (incrementally)
			m_incrementally = UpdateType.ScheduleForAddition;
		else
			m_incrementally = UpdateType.Add;

		try {
			m_dict = store.getDictionary();
			// Pre-resolve the built-in predicates; order matters:
			// [0]=rdf:type, [1]=owl:sameAs (equality), [2]=owl:differentFrom.
			// NOTE(review): if this throws, resourceID stays null and later
			// uses will NPE -- confirm whether that is acceptable here.
			resourceID = m_dict.resolveResources(
					new String[] {Namespace.RDF_TYPE, Namespace.EQUALITY, Namespace.INEQUALITY},
					new int[] {Datatype.IRI_REFERENCE.value(), Datatype.IRI_REFERENCE.value(), Datatype.IRI_REFERENCE.value()}
					);
		} catch (JRDFStoreException e) {
			e.printStackTrace();
		}
	}
| 60 | |||
	/** True iff the given ID is the pre-resolved resource ID of rdf:type. */
	public boolean isRdfTypeID(int id) {
		return id == resourceID[0];
	}
| 64 | |||
	/**
	 * Adds a triple of resource IDs to the store, using the update mode
	 * (direct or scheduled) chosen at construction.
	 */
	public void addTripleByID(int[] tuple) {
		try {
			m_store.addTriplesByResourceIDs(tuple, m_incrementally);
		} catch (JRDFStoreException e) {
			e.printStackTrace();
		}
	}
| 72 | |||
	/**
	 * Converts a ground HermiT atom to an RDFox triple (via getRDFoxTriple)
	 * and adds it to the store under the configured update mode.
	 */
	public void addTripleByTerm(Atom atom) {
		try {
			m_store.addTriples(getRDFoxTriple(atom), m_incrementally);
		} catch (JRDFStoreException e) {
			e.printStackTrace();
		}
	}
| 80 | |||
| 81 | public static GroundTerm[] getRDFoxTriple(Atom instance) { | ||
| 82 | if (instance.getArity() == 1) | ||
| 83 | return new GroundTerm[] { | ||
| 84 | uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()), | ||
| 85 | uk.ac.ox.cs.JRDFox.model.Individual.RDF_TYPE, | ||
| 86 | uk.ac.ox.cs.JRDFox.model.Individual.create(((AtomicConcept) instance.getDLPredicate()).getIRI()) }; | ||
| 87 | else if (instance.getDLPredicate() instanceof Equality || instance.getDLPredicate() instanceof AnnotatedEquality) | ||
| 88 | return new GroundTerm[] { | ||
| 89 | uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()), | ||
| 90 | uk.ac.ox.cs.JRDFox.model.Individual.SAME_AS, | ||
| 91 | uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) }; | ||
| 92 | else if (instance.getDLPredicate() instanceof Inequality) | ||
| 93 | return new GroundTerm[] { | ||
| 94 | uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()), | ||
| 95 | uk.ac.ox.cs.JRDFox.model.Individual.DIFFERENT_FROM, | ||
| 96 | uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) }; | ||
| 97 | else | ||
| 98 | return new GroundTerm[] { | ||
| 99 | uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()), | ||
| 100 | uk.ac.ox.cs.JRDFox.model.Individual.create(((AtomicRole) instance.getDLPredicate()).getIRI()), | ||
| 101 | uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) }; | ||
| 102 | } | ||
| 103 | |||
| 104 | int[] resourceID; // rdf:type, owl:sameAs, owl:differentFrom | ||
| 105 | |||
| 106 | public int[] getInstance(Atom atom, Map<Variable, Integer> assignment) { | ||
| 107 | DLPredicate p = atom.getDLPredicate(); | ||
| 108 | if (p instanceof Equality || p instanceof AnnotatedEquality) | ||
| 109 | return new int[] { | ||
| 110 | getResourceID(atom.getArgument(0), assignment), | ||
| 111 | resourceID[1], | ||
| 112 | getResourceID(atom.getArgument(1), assignment) | ||
| 113 | }; | ||
| 114 | else if (p instanceof Inequality) | ||
| 115 | return new int[] { | ||
| 116 | getResourceID(atom.getArgument(0), assignment), | ||
| 117 | resourceID[2], | ||
| 118 | getResourceID(atom.getArgument(1), assignment) | ||
| 119 | }; | ||
| 120 | else if (atom.getArity() == 1) | ||
| 121 | return new int[] { | ||
| 122 | getResourceID(atom.getArgument(0), assignment), | ||
| 123 | resourceID[0], | ||
| 124 | getResourceID(p) | ||
| 125 | }; | ||
| 126 | else | ||
| 127 | return new int[] { | ||
| 128 | getResourceID(atom.getArgument(0), assignment), | ||
| 129 | getResourceID(p), | ||
| 130 | getResourceID(atom.getArgument(1), assignment) | ||
| 131 | }; | ||
| 132 | } | ||
| 133 | |||
	/**
	 * Looks up a resource ID in the dictionary and returns its quoted form.
	 * NOTE(review): if getResources throws, res[0] stays null and
	 * getQuotedTerm will NPE -- confirm intended.
	 */
	public String getRawTerm(int id) {
		Resource[] res = new Resource[1];
		try {
			m_dict.getResources(new int[] {id}, 0, 1, res);
		} catch (JRDFStoreException e) {
			e.printStackTrace();
		}
		return getQuotedTerm(res[0]);
	}
| 143 | |||
	// Cache from predicate IRI to its resolved RDFox resource ID.
	Map<String, Integer> predicateCache = new HashMap<String, Integer>();

	/**
	 * Resolves (and caches) the resource ID of an atomic concept or role.
	 * NOTE(review): if resolution throws JRDFStoreException, id stays null
	 * and the int return unboxes to a NullPointerException -- confirm intended.
	 */
	public int getResourceID(DLPredicate p) {
		Integer id;
		String name = p instanceof AtomicConcept ? ((AtomicConcept) p).getIRI() : ((AtomicRole) p).getIRI();
		if ((id = predicateCache.get(name)) != null) return id;
		try {
			predicateCache.put(name, id = resolveResource(name, Datatype.IRI_REFERENCE.value()));

		} catch (JRDFStoreException e) {
			e.printStackTrace();
		}
		return id;
	}
| 158 | |||
	/**
	 * Resolves an IRI string to its RDFox resource ID (no caching).
	 * NOTE(review): on JRDFStoreException, id remains null and the int
	 * return unboxes to a NullPointerException -- confirm intended.
	 */
	public int getResourceID(String name) {
		Integer id = null;
		try {
			id = resolveResource(name, Datatype.IRI_REFERENCE.value());
		} catch (JRDFStoreException e) {
			e.printStackTrace();
		}
		return id;
	}
| 168 | |||
| 169 | private int resolveResource(String name, int type) throws JRDFStoreException { | ||
| 170 | String[] lexicalForms = new String[] {name}; | ||
| 171 | int[] types = new int[] {type}; | ||
| 172 | return m_dict.resolveResources(lexicalForms, types)[0]; | ||
| 173 | } | ||
| 174 | |||
| 175 | Map<Term, Integer> termCache = new HashMap<Term, Integer>(); | ||
| 176 | Queue<Term> termList = new LinkedList<Term>(); | ||
| 177 | int sizeLimit = 10000; | ||
| 178 | |||
| 179 | private int getResourceID(Term arg, Map<Variable, Integer> assignment) { | ||
| 180 | while (termCache.size() > sizeLimit) | ||
| 181 | termCache.remove(termList.poll()); | ||
| 182 | |||
| 183 | if (arg instanceof Variable) return assignment.get((Variable) arg); | ||
| 184 | Integer id = null; | ||
| 185 | if ((id = termCache.get(arg)) != null) | ||
| 186 | return id; | ||
| 187 | |||
| 188 | // if (arg instanceof Individual) { | ||
| 189 | try { | ||
| 190 | if (arg instanceof Individual) | ||
| 191 | termCache.put(arg, id = resolveResource(((Individual) arg).getIRI(), Datatype.IRI_REFERENCE.value())); | ||
| 192 | else if (arg instanceof Constant) | ||
| 193 | termCache.put(arg, id = resolveResource(((Constant) arg).getLexicalForm(), getDatatypeID(((Constant) arg).getDatatypeURI()))); | ||
| 194 | |||
| 195 | } catch (JRDFStoreException e) { | ||
| 196 | e.printStackTrace(); | ||
| 197 | } | ||
| 198 | // } | ||
| 199 | |||
| 200 | return id; | ||
| 201 | } | ||
| 202 | |||
	/**
	 * Maps an XSD/RDF datatype URI to the corresponding RDFox Datatype ID,
	 * or -1 when the datatype is not supported.
	 */
	private static int getDatatypeID(String uri) {
		if (uri.equals("http://www.w3.org/2001/XMLSchema#string")) return Datatype.XSD_STRING.value();
		if (uri.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#PlainLiteral")) return Datatype.RDF_PLAIN_LITERAL.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#integer")) return Datatype.XSD_INTEGER.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#float")) return Datatype.XSD_FLOAT.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#double")) return Datatype.XSD_DOUBLE.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#boolean")) return Datatype.XSD_BOOLEAN.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#dateTime")) return Datatype.XSD_DATE_TIME.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#time")) return Datatype.XSD_TIME.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#date")) return Datatype.XSD_DATE.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#gYearMonth")) return Datatype.XSD_G_YEAR_MONTH.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#gYear")) return Datatype.XSD_G_YEAR.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#gMonthDay")) return Datatype.XSD_G_MONTH_DAY.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#gDay")) return Datatype.XSD_G_DAY.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#gMonth")) return Datatype.XSD_G_MONTH.value();
		if (uri.equals("http://www.w3.org/2001/XMLSchema#duration")) return Datatype.XSD_DURATION.value();

		// Unknown datatype URI.
		return -1;
	}
| 222 | |||
| 223 | public int[] getResourceIDs(Collection<uk.ac.ox.cs.JRDFox.model.Individual> individuals) { | ||
| 224 | String[] str = new String[individuals.size()]; | ||
| 225 | int[] types = new int[individuals.size()]; | ||
| 226 | int index = 0; | ||
| 227 | for (uk.ac.ox.cs.JRDFox.model.Individual individual : individuals) { | ||
| 228 | types[index] = Datatype.IRI_REFERENCE.value(); | ||
| 229 | str[index++] = individual.getIRI(); | ||
| 230 | } | ||
| 231 | |||
| 232 | try { | ||
| 233 | return m_dict.resolveResources(str, types); | ||
| 234 | } catch (JRDFStoreException e) { | ||
| 235 | e.printStackTrace(); | ||
| 236 | return null; | ||
| 237 | } | ||
| 238 | } | ||
| 239 | |||
| 240 | public static String getQuotedTerm(Resource r) { | ||
| 241 | if (r.m_datatype.equals(Datatype.IRI_REFERENCE)) | ||
| 242 | return OWLHelper.addAngles(r.m_lexicalForm); | ||
| 243 | if (r.m_datatype.equals(Datatype.XSD_STRING) || r.m_datatype.equals(Datatype.RDF_PLAIN_LITERAL)) | ||
| 244 | return "\"" + r.m_lexicalForm + "\""; | ||
| 245 | else | ||
| 246 | return "\"" + r.m_lexicalForm + "\"^^<" + r.m_datatype.getIRI() + ">"; | ||
| 247 | } | ||
| 248 | |||
| 249 | } | ||
