From 4b7253559c290b6fdd1c4122830f153fda85dd62 Mon Sep 17 00:00:00 2001 From: RncLsn Date: Fri, 29 May 2015 18:35:51 +0100 Subject: Disposable. --- .../ox/cs/pagoda/reasoner/ConsistencyManager.java | 275 ++++---- .../ox/cs/pagoda/reasoner/ELHOQueryReasoner.java | 160 ++--- .../ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java | 352 +++++----- .../ac/ox/cs/pagoda/reasoner/HermiTReasoner.java | 196 +++--- .../ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java | 662 +++++++++---------- src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java | 18 +- src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java | 436 +++++++------ .../ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java | 10 +- .../ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java | 13 +- .../cs/pagoda/reasoner/light/BasicQueryEngine.java | 720 +++++++++++---------- .../cs/pagoda/reasoner/light/KarmaQueryEngine.java | 155 ++--- .../pagoda/reasoner/light/RDFoxAnswerTuples.java | 44 +- .../cs/pagoda/reasoner/light/RDFoxQueryEngine.java | 219 ++++--- 13 files changed, 1663 insertions(+), 1597 deletions(-) (limited to 'src/uk/ac/ox/cs/pagoda/reasoner') diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java index ef9338a..b4a1775 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java @@ -22,10 +22,11 @@ import uk.ac.ox.cs.pagoda.tracking.QueryTracker; import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.pagoda.util.disposable.Disposable; import java.util.LinkedList; -public class ConsistencyManager { +public class ConsistencyManager extends Disposable { protected MyQueryReasoner m_reasoner; protected QueryManager m_queryManager; @@ -40,7 +41,109 @@ public class ConsistencyManager { m_reasoner = reasoner; m_queryManager = reasoner.getQueryManager(); } - + + @Override + public void dispose() { + super.dispose(); + fullQueryRecord.dispose(); + } + + public void extractBottomFragment() { + if(fragmentExtracted) return; + fragmentExtracted = true; + + UpperDatalogProgram upperProgram = m_reasoner.program.getUpper(); + int number = upperProgram.getBottomNumber(); + + if(number <= 1) { + botQueryRecords = new QueryRecord[]{fullQueryRecord}; + } + else { + QueryRecord[] tempQueryRecords = new QueryRecord[number - 1]; + QueryRecord record; + for(int i = 0; i < number - 1; ++i) { + tempQueryRecords[i] = record = + m_queryManager.create(QueryRecord.botQueryText.replace("Nothing", "Nothing" + (i + 1)), 0, i + 1); + AnswerTuples iter = null; + try { + iter = m_reasoner.trackingStore.evaluate(record.getQueryText(), record.getAnswerVariables()); + record.updateUpperBoundAnswers(iter); + } finally { + if(iter != null) iter.dispose(); + iter = null; + } + } + + int bottomNumber = 0; + int[] group = new int[number - 1]; + for(int i = 0; i < number - 1; ++i) group[i] = i; + for(int i = 0; i < number - 1; ++i) + if(tempQueryRecords[i].isProcessed()) tempQueryRecords[i].dispose(); + else if(group[i] == i) { + ++bottomNumber; + record = tempQueryRecords[i]; + for(int j = i + 1; j < number - 1; ++j) + if(record.hasSameGapAnswers(tempQueryRecords[j])) + group[j] = i; + } + + Utility.logInfo("There are " + bottomNumber + " different bottom fragments."); + toAddClauses = new LinkedList(); + int bottomCounter = 0; + botQueryRecords = new QueryRecord[bottomNumber]; + Variable X = Variable.create("X"); + for(int i = 0; i < number - 1; ++i) + 
if(!tempQueryRecords[i].isDisposed() && !tempQueryRecords[i].isProcessed()) + if(group[i] == i) { + botQueryRecords[bottomCounter] = record = tempQueryRecords[i]; + record.resetInfo(QueryRecord.botQueryText.replace("Nothing", "Nothing_final" + (++bottomCounter)), 0, + group[i] = bottomCounter); + toAddClauses.add( + DLClause.create( + new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + bottomCounter), X)}, + new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)})); + } + else { + toAddClauses.add( + DLClause.create( + new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + group[group[i]]), X)}, + new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)})); + tempQueryRecords[i].dispose(); + } + + upperProgram.updateDependencyGraph(toAddClauses); + } + + String[] programs = collectTrackingProgramAndImport(); + if(programs.length == 0) + return; + + DataStore store = m_reasoner.trackingStore.getDataStore(); + long oldTripleCount, tripleCount; + try { + Timer t1 = new Timer(); + oldTripleCount = store.getTriplesCount(); + for(String program : programs) + store.importRules(program, UpdateType.ScheduleForAddition); + store.applyReasoning(true); + tripleCount = store.getTriplesCount(); + + Utility.logInfo("tracking store after materialising tracking program: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)", + "tracking store finished the materialisation of tracking program in " + t1.duration() + " seconds."); + + extractAxioms(); + store.clearRulesAndMakeFactsExplicit(); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } catch(OWLOntologyCreationException e) { + e.printStackTrace(); + } + } + + public QueryRecord[] getQueryRecords() { + return botQueryRecords; + } + boolean checkRLLowerBound() { fullQueryRecord = m_queryManager.create(QueryRecord.botQueryText, 0); AnswerTuples iter = null; @@ -59,9 +162,22 @@ public class ConsistencyManager { return true; } +// protected boolean unsatisfiability(double duration) { +// fullQueryRecord.dispose(); +// Utility.logDebug("The ontology and dataset is unsatisfiable."); +// return false; +// } + +// protected boolean satisfiability(double duration) { +// fullQueryRecord.dispose(); +// Utility.logDebug("The ontology and dataset is satisfiable."); +// return true; +// } + boolean checkELLowerBound() { - fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables())); - if (fullQueryRecord.getNoOfSoundAnswers() > 0) { + fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord + .getAnswerVariables())); + if(fullQueryRecord.getNoOfSoundAnswers() > 0) { Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple()); return true; } @@ -69,39 +185,22 @@ public class ConsistencyManager { } boolean checkUpper(BasicQueryEngine upperStore) { - if (upperStore != null) { + if(upperStore != null) { AnswerTuples tuples = null; try { tuples = upperStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()); - if (!tuples.isValid()) { - Utility.logInfo("There are no contradictions derived in "+ upperStore.getName() +" materialisation."); + if(!tuples.isValid()) { + Utility.logInfo("There are no contradictions derived in " + upperStore.getName() + " materialisation."); Utility.logDebug("The ontology and 
dataset is satisfiable."); return true; } - } - finally { - if (tuples != null) tuples.dispose(); + } finally { + if(tuples != null) tuples.dispose(); } } return false; } - void dispose() { - fullQueryRecord.dispose(); - } - -// protected boolean unsatisfiability(double duration) { -// fullQueryRecord.dispose(); -// Utility.logDebug("The ontology and dataset is unsatisfiable."); -// return false; -// } - -// protected boolean satisfiability(double duration) { -// fullQueryRecord.dispose(); -// Utility.logDebug("The ontology and dataset is satisfiable."); -// return true; -// } - boolean check() { // if (!checkRLLowerBound()) return false; // if (!checkELLowerBound()) return false; @@ -148,148 +247,54 @@ public class ConsistencyManager { return true; } - public void extractBottomFragment() { - if (fragmentExtracted) return ; - fragmentExtracted = true; - - UpperDatalogProgram upperProgram = m_reasoner.program.getUpper(); - int number = upperProgram.getBottomNumber(); - - if (number <= 1) { - botQueryRecords = new QueryRecord[] { fullQueryRecord }; - } - else { - QueryRecord[] tempQueryRecords = new QueryRecord[number - 1]; - QueryRecord record; - for (int i = 0; i < number - 1; ++i) { - tempQueryRecords[i] = record = m_queryManager.create(QueryRecord.botQueryText.replace("Nothing", "Nothing" + (i + 1)), 0, i + 1); - AnswerTuples iter = null; - try { - iter = m_reasoner.trackingStore.evaluate(record.getQueryText(), record.getAnswerVariables()); - record.updateUpperBoundAnswers(iter); - } finally { - if (iter != null) iter.dispose(); - iter = null; - } - } - - int bottomNumber = 0; - int[] group = new int[number - 1]; - for (int i = 0; i < number - 1; ++i) group[i] = i; - for (int i = 0; i < number - 1; ++i) - if(tempQueryRecords[i].isProcessed()) tempQueryRecords[i].dispose(); - else if (group[i] == i) { - ++bottomNumber; - record = tempQueryRecords[i]; - for (int j = i + 1; j < number - 1; ++j) - if (record.hasSameGapAnswers(tempQueryRecords[j])) - group[j] = i; - } - - Utility.logInfo("There are " + bottomNumber + " different bottom fragments."); - toAddClauses = new LinkedList(); - int bottomCounter = 0; - botQueryRecords = new QueryRecord[bottomNumber]; - Variable X = Variable.create("X"); - for (int i = 0; i < number - 1; ++i) - if(!tempQueryRecords[i].isProcessed()) - if (group[i] == i) { - botQueryRecords[bottomCounter] = record = tempQueryRecords[i]; - record.resetInfo(QueryRecord.botQueryText.replace("Nothing", "Nothing_final" + (++bottomCounter)), 0, group[i] = bottomCounter); - toAddClauses.add( - DLClause.create( - new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + bottomCounter), X)}, - new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)})); - } - else { - toAddClauses.add( - DLClause.create( - new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + group[group[i]]), X)}, - new Atom[] {Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)})); - tempQueryRecords[i].dispose(); - } - - upperProgram.updateDependencyGraph(toAddClauses); - } - - String[] programs = collectTrackingProgramAndImport(); - if (programs.length == 0) - return ; - - DataStore store = m_reasoner.trackingStore.getDataStore(); - long oldTripleCount, tripleCount; - try { - Timer t1 = new Timer(); - oldTripleCount = store.getTriplesCount(); - for (String program: programs) - store.importRules(program, UpdateType.ScheduleForAddition); - store.applyReasoning(true); - tripleCount = 
store.getTriplesCount(); - - Utility.logInfo("tracking store after materialising tracking program: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)", - "tracking store finished the materialisation of tracking program in " + t1.duration() + " seconds."); - - extractAxioms(); - store.clearRulesAndMakeFactsExplicit(); - } catch (JRDFStoreException e) { - e.printStackTrace(); - } catch (OWLOntologyCreationException e) { - e.printStackTrace(); - } - } - private void extractAxioms4Full() throws OWLOntologyCreationException { - OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager(); + OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager(); OWLOntology fullOntology = manager.createOntology(); for (QueryRecord record: botQueryRecords) { for (DLClause clause: record.getRelevantClauses()) { fullQueryRecord.addRelevantClauses(clause); } - manager.addAxioms(fullOntology, record.getRelevantOntology().getAxioms()); + manager.addAxioms(fullOntology, record.getRelevantOntology().getAxioms()); } fullQueryRecord.setRelevantOntology(fullOntology); } private void extractAxioms() throws OWLOntologyCreationException { - OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager(); + OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager(); for (QueryRecord record: botQueryRecords) { record.setRelevantOntology(manager.createOntology()); - QueryTracker tracker = new QueryTracker(m_reasoner.encoder, m_reasoner.rlLowerStore, record); + QueryTracker tracker = new QueryTracker(m_reasoner.encoder, m_reasoner.rlLowerStore, record); m_reasoner.encoder.setCurrentQuery(record); tracker.extractAxioms(m_reasoner.trackingStore); // record.saveRelevantClause(); // record.saveRelevantOntology("bottom" + record.getQueryID() + ".owl"); Utility.logInfo("finish extracting axioms for bottom " + record.getQueryID()); - } + } } private String[] collectTrackingProgramAndImport() { - String[] programs = new String[botQueryRecords.length]; + String[] programs = new String[botQueryRecords.length]; TrackingRuleEncoder encoder = m_reasoner.encoder; - + StringBuilder builder; - LinkedList currentClauses = new LinkedList(); - + LinkedList currentClauses = new LinkedList(); + for (int i = 0; i < botQueryRecords.length; ++i) { encoder.setCurrentQuery(botQueryRecords[i]); builder = new StringBuilder(encoder.getTrackingProgram()); // encoder.saveTrackingRules("tracking_bottom" + (i + 1) + ".dlog"); - + for (DLClause clause: toAddClauses) if (clause.getHeadAtom(0).getDLPredicate().toString().contains("_final" + (i + 1))) - currentClauses.add(clause); - + currentClauses.add(clause); + builder.append(DLClauseHelper.toString(currentClauses)); - programs[i] = builder.toString(); - + programs[i] = builder.toString(); + currentClauses.clear(); } - - return programs; - } - public QueryRecord[] getQueryRecords() { - return botQueryRecords; + return programs; } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java index f5a8093..0a151bc 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java @@ -10,86 +10,92 @@ import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine; import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; +import 
uk.ac.ox.cs.pagoda.util.disposable.DisposedException; class ELHOQueryReasoner extends QueryReasoner { - LowerDatalogProgram program; - - OWLOntology elho_ontology; - KarmaQueryEngine elLowerStore = null; - - private Timer t = new Timer(); - - public ELHOQueryReasoner() { - elLowerStore = new KarmaQueryEngine("el"); - } - - @Override - public void evaluate(QueryRecord queryRecord) { - AnswerTuples elAnswer = null; - t.reset(); - try { - elAnswer = elLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); - queryRecord.updateLowerBoundAnswers(elAnswer); - } finally { - if (elAnswer != null) elAnswer.dispose(); - } - queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); - - queryRecord.setDifficulty(Step.EL_LOWER_BOUND); - queryRecord.markAsProcessed(); - } - - @Override - public void evaluateUpper(QueryRecord queryRecord) { - evaluate(queryRecord); - } - - @Override - public void dispose() { - if (elLowerStore != null) elLowerStore.dispose(); - super.dispose(); - } - - @Override - public void loadOntology(OWLOntology ontology) { - program = new LowerDatalogProgram(properties.getToClassify()); - program.load(ontology, new UnaryBottom()); - program.transform(); - - importData(program.getAdditionalDataFile()); - - elho_ontology = new ELHOProfile().getFragment(ontology); - elLowerStore.processOntology(elho_ontology); - } - - @Override - public boolean preprocess() { - elLowerStore.importRDFData("data", importedData.toString()); - String rlLowerProgramText = program.toString(); + LowerDatalogProgram program; + + OWLOntology elho_ontology; + KarmaQueryEngine elLowerStore = null; + + private Timer t = new Timer(); + + public ELHOQueryReasoner() { + elLowerStore = new KarmaQueryEngine("el"); + } + + @Override + public void evaluate(QueryRecord queryRecord) { + if(isDisposed()) throw new DisposedException(); + AnswerTuples elAnswer = null; + t.reset(); + try { + elAnswer = elLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); + queryRecord.updateLowerBoundAnswers(elAnswer); + } finally { + if(elAnswer != null) elAnswer.dispose(); + } + queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); + + queryRecord.setDifficulty(Step.EL_LOWER_BOUND); + queryRecord.markAsProcessed(); + } + + @Override + public void evaluateUpper(QueryRecord queryRecord) { + if(isDisposed()) throw new DisposedException(); + evaluate(queryRecord); + } + + @Override + public void dispose() { + super.dispose(); + if(elLowerStore != null) elLowerStore.dispose(); + } + + @Override + public void loadOntology(OWLOntology ontology) { + if(isDisposed()) throw new DisposedException(); + program = new LowerDatalogProgram(properties.getToClassify()); + program.load(ontology, new UnaryBottom()); + program.transform(); + + importData(program.getAdditionalDataFile()); + + elho_ontology = new ELHOProfile().getFragment(ontology); + elLowerStore.processOntology(elho_ontology); + } + + @Override + public boolean preprocess() { + if(isDisposed()) throw new DisposedException(); + elLowerStore.importRDFData("data", importedData.toString()); + String rlLowerProgramText = program.toString(); // program.save(); - elLowerStore.materialise("lower program", rlLowerProgramText); - elLowerStore.initialiseKarma(); - - if (!isConsistent()) { - Utility.logDebug("The dataset is not consistent with the ontology."); - return false; - } - return true; - } - - @Override - public boolean isConsistent() { - String[] X = new String[] {"X"}; - AnswerTuples ans = null; - try { - ans = 
elLowerStore.evaluate(QueryRecord.botQueryText, X); - if (ans.isValid()) return false; - } finally { - if (ans != null) ans.dispose(); - } - - return true; - } + elLowerStore.materialise("lower program", rlLowerProgramText); + elLowerStore.initialiseKarma(); + + if(!isConsistent()) { + Utility.logDebug("The dataset is not consistent with the ontology."); + return false; + } + return true; + } + + @Override + public boolean isConsistent() { + if(isDisposed()) throw new DisposedException(); + String[] X = new String[]{"X"}; + AnswerTuples ans = null; + try { + ans = elLowerStore.evaluate(QueryRecord.botQueryText, X); + if(ans.isValid()) return false; + } finally { + if(ans != null) ans.dispose(); + } + + return true; + } } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java index c74ea58..771190e 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java @@ -13,181 +13,187 @@ import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine; import uk.ac.ox.cs.pagoda.rules.DatalogProgram; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; class ELHOUQueryReasoner extends QueryReasoner { - DatalogProgram program; - - BasicQueryEngine rlLowerStore; - BasicQueryEngine rlUpperStore; - - OWLOntology elho_ontology; - KarmaQueryEngine elLowerStore = null; - - boolean multiStageTag, equalityTag; - String originalMarkProgram; - private Timer t = new Timer(); - - public ELHOUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { - this.multiStageTag = multiStageTag; - this.equalityTag = considerEqualities; - rlLowerStore = new BasicQueryEngine("rl-lower-bound"); - elLowerStore = new KarmaQueryEngine("el-lower-bound"); - - if(!multiStageTag) - rlUpperStore = new BasicQueryEngine("rl-upper-bound"); - else - rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); - } - - @Override - public void evaluate(QueryRecord queryRecord) { - AnswerTuples rlAnswer = null; - t.reset(); - try { - rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); - queryRecord.updateLowerBoundAnswers(rlAnswer); - } finally { - if(rlAnswer != null) rlAnswer.dispose(); - } - queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration()); - - String extendedQueryText = queryRecord.getExtendedQueryText().get(0); - String[] toQuery = queryRecord.getQueryText().equals(extendedQueryText) ? 
- new String[]{queryRecord.getQueryText()} : - new String[] {queryRecord.getQueryText(), extendedQueryText}; - - for (String queryText: toQuery) { - rlAnswer = null; - t.reset(); - try { - rlAnswer = rlUpperStore.evaluate(queryText, queryRecord.getAnswerVariables()); - queryRecord.updateUpperBoundAnswers(rlAnswer); - } finally { - if(rlAnswer != null) rlAnswer.dispose(); - } - queryRecord.addProcessingTime(Step.UPPER_BOUND, t.duration()); - - if(queryRecord.isProcessed()) { - queryRecord.setDifficulty(Step.UPPER_BOUND); - return; - } - } - - AnswerTuples elAnswer = null; - t.reset(); - try { - elAnswer = - elLowerStore.evaluate(extendedQueryText, queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers()); - queryRecord.updateLowerBoundAnswers(elAnswer); - } finally { - if (elAnswer != null) elAnswer.dispose(); - } - queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); - } - - @Override - public void evaluateUpper(QueryRecord queryRecord) { - AnswerTuples rlAnswer = null; - try { - rlAnswer = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); - queryRecord.updateUpperBoundAnswers(rlAnswer, true); - } finally { - if(rlAnswer != null) rlAnswer.dispose(); - } - } - - @Override - public void dispose() { - if (elLowerStore != null) elLowerStore.dispose(); - if(rlUpperStore != null) rlUpperStore.dispose(); - super.dispose(); - } - - @Override - public void loadOntology(OWLOntology o) { - if (!equalityTag) { - EqualitiesEliminator eliminator = new EqualitiesEliminator(o); - o = eliminator.getOutputOntology(); - eliminator.save(); - } - - OWLOntology ontology = o; - program = new DatalogProgram(ontology, properties.getToClassify()); - - importData(program.getAdditionalDataFile()); - - elho_ontology = new ELHOProfile().getFragment(ontology); - elLowerStore.processOntology(elho_ontology); - originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); - } - - @Override - public boolean preprocess() { - String name = "data", datafile = importedData.toString(); - - String lowername = "lower program"; - String rlLowerProgramText = program.getLower().toString(); - - rlUpperStore.importRDFData(name, datafile); - rlUpperStore.materialise("saturate named individuals", originalMarkProgram); - - int flag = rlUpperStore.materialiseRestrictedly(program, null); - if (flag != 1) { - if (flag == -1) return false; - rlUpperStore.dispose(); - - if (!multiStageTag) - rlUpperStore = new BasicQueryEngine("rl-upper-bound"); - else - rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); - rlUpperStore.importRDFData(name, datafile); - rlUpperStore.materialise("saturate named individuals", originalMarkProgram); - rlUpperStore.materialiseFoldedly(program, null); - } - Utility.logInfo("upper store ready."); - - rlLowerStore.importRDFData(name, datafile); - rlLowerStore.materialise(lowername, rlLowerProgramText); - Utility.logInfo("lower store ready."); - - elLowerStore.importRDFData(name, datafile); - elLowerStore.materialise("saturate named individuals", originalMarkProgram); - elLowerStore.materialise(lowername, rlLowerProgramText); - - elLowerStore.initialiseKarma(); - Utility.logInfo("EL lower store ready."); - - if (!isConsistent()) { - Utility.logInfo("The dataset is not consistent with the ontology."); - return false; - } - Utility.logInfo("The dataset is consistent."); - return true; - } - - @Override - public boolean isConsistent() { - Utility.logInfo("Start checking consistency... 
"); - String[] X = new String[] {"X"}; - AnswerTuples ans = null; - try { - ans = rlUpperStore.evaluate(QueryRecord.botQueryText, X); - if (!ans.isValid()) return true; - } finally { - if (ans != null) ans.dispose(); - } - - ans = null; - try { - ans = elLowerStore.evaluate(QueryRecord.botQueryText, X); - if (ans.isValid()) return false; - } finally { - if (ans != null) ans.dispose(); - } - - Utility.logDebug("The consistency of the data has not been determined yet."); - return true; - } + DatalogProgram program; + + BasicQueryEngine rlLowerStore; + BasicQueryEngine rlUpperStore; + + OWLOntology elho_ontology; + KarmaQueryEngine elLowerStore = null; + + boolean multiStageTag, equalityTag; + String originalMarkProgram; + private Timer t = new Timer(); + + public ELHOUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { + this.multiStageTag = multiStageTag; + this.equalityTag = considerEqualities; + rlLowerStore = new BasicQueryEngine("rl-lower-bound"); + elLowerStore = new KarmaQueryEngine("el-lower-bound"); + + if(!multiStageTag) + rlUpperStore = new BasicQueryEngine("rl-upper-bound"); + else + rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); + } + + @Override + public void evaluate(QueryRecord queryRecord) { + if(isDisposed()) throw new DisposedException(); + AnswerTuples rlAnswer = null; + t.reset(); + try { + rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); + queryRecord.updateLowerBoundAnswers(rlAnswer); + } finally { + if(rlAnswer != null) rlAnswer.dispose(); + } + queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration()); + + String extendedQueryText = queryRecord.getExtendedQueryText().get(0); + String[] toQuery = queryRecord.getQueryText().equals(extendedQueryText) ? 
+ new String[]{queryRecord.getQueryText()} : + new String[]{queryRecord.getQueryText(), extendedQueryText}; + + for(String queryText : toQuery) { + rlAnswer = null; + t.reset(); + try { + rlAnswer = rlUpperStore.evaluate(queryText, queryRecord.getAnswerVariables()); + queryRecord.updateUpperBoundAnswers(rlAnswer); + } finally { + if(rlAnswer != null) rlAnswer.dispose(); + } + queryRecord.addProcessingTime(Step.UPPER_BOUND, t.duration()); + + if(queryRecord.isProcessed()) { + queryRecord.setDifficulty(Step.UPPER_BOUND); + return; + } + } + + AnswerTuples elAnswer = null; + t.reset(); + try { + elAnswer = + elLowerStore.evaluate(extendedQueryText, queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers()); + queryRecord.updateLowerBoundAnswers(elAnswer); + } finally { + if(elAnswer != null) elAnswer.dispose(); + } + queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); + } + + @Override + public void evaluateUpper(QueryRecord queryRecord) { + if(isDisposed()) throw new DisposedException(); + AnswerTuples rlAnswer = null; + try { + rlAnswer = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); + queryRecord.updateUpperBoundAnswers(rlAnswer, true); + } finally { + if(rlAnswer != null) rlAnswer.dispose(); + } + } + + @Override + public void dispose() { + super.dispose(); + if(elLowerStore != null) elLowerStore.dispose(); + if(rlUpperStore != null) rlUpperStore.dispose(); + } + + @Override + public void loadOntology(OWLOntology o) { + if(isDisposed()) throw new DisposedException(); + if(!equalityTag) { + EqualitiesEliminator eliminator = new EqualitiesEliminator(o); + o = eliminator.getOutputOntology(); + eliminator.save(); + } + + OWLOntology ontology = o; + program = new DatalogProgram(ontology, properties.getToClassify()); + + importData(program.getAdditionalDataFile()); + + elho_ontology = new ELHOProfile().getFragment(ontology); + elLowerStore.processOntology(elho_ontology); + originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); + } + + @Override + public boolean preprocess() { + if(isDisposed()) throw new DisposedException(); + String name = "data", datafile = importedData.toString(); + + String lowername = "lower program"; + String rlLowerProgramText = program.getLower().toString(); + + rlUpperStore.importRDFData(name, datafile); + rlUpperStore.materialise("saturate named individuals", originalMarkProgram); + + int flag = rlUpperStore.materialiseRestrictedly(program, null); + if(flag != 1) { + if(flag == -1) return false; + rlUpperStore.dispose(); + + if(!multiStageTag) + rlUpperStore = new BasicQueryEngine("rl-upper-bound"); + else + rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); + rlUpperStore.importRDFData(name, datafile); + rlUpperStore.materialise("saturate named individuals", originalMarkProgram); + rlUpperStore.materialiseFoldedly(program, null); + } + Utility.logInfo("upper store ready."); + + rlLowerStore.importRDFData(name, datafile); + rlLowerStore.materialise(lowername, rlLowerProgramText); + Utility.logInfo("lower store ready."); + + elLowerStore.importRDFData(name, datafile); + elLowerStore.materialise("saturate named individuals", originalMarkProgram); + elLowerStore.materialise(lowername, rlLowerProgramText); + + elLowerStore.initialiseKarma(); + Utility.logInfo("EL lower store ready."); + + if(!isConsistent()) { + Utility.logInfo("The dataset is not consistent with the ontology."); + return false; + } + Utility.logInfo("The dataset is consistent."); + return true; + } + + @Override + 
public boolean isConsistent() { + if(isDisposed()) throw new DisposedException(); + Utility.logInfo("Start checking consistency... "); + String[] X = new String[]{"X"}; + AnswerTuples ans = null; + try { + ans = rlUpperStore.evaluate(QueryRecord.botQueryText, X); + if(!ans.isValid()) return true; + } finally { + if(ans != null) ans.dispose(); + } + + ans = null; + try { + ans = elLowerStore.evaluate(QueryRecord.botQueryText, X); + if(ans.isValid()) return false; + } finally { + if(ans != null) ans.dispose(); + } + + Utility.logDebug("The consistency of the data has not been determined yet."); + return true; + } } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java index d1856c9..78b9a0b 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java @@ -10,6 +10,7 @@ import uk.ac.ox.cs.pagoda.query.*; import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; import uk.ac.ox.cs.pagoda.rules.DatalogProgram; import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; import java.io.File; import java.io.IOException; @@ -17,100 +18,105 @@ import java.util.HashSet; import java.util.Set; class HermiTReasoner extends QueryReasoner { - - Reasoner hermit; - - BasicQueryEngine upperStore = null; - - OWLOntology onto; - OWLDataFactory factory; - - String importedOntologyPath = null; - - QueryRoller roller; - boolean toCheckSatisfiability; - - public HermiTReasoner(boolean toCheckSatisfiability) { - this.toCheckSatisfiability = toCheckSatisfiability; - } - - @Override - public void loadOntology(OWLOntology ontology) { - onto = ontology; - } - - @Override - public boolean preprocess() { - OWLOntology tbox = onto; - try { - onto = OWLHelper.getImportedOntology(tbox, importedData.toString().split(ImportDataFileSeparator)); - importedOntologyPath = OWLHelper.getOntologyPath(onto); - } catch(OWLOntologyCreationException | OWLOntologyStorageException | IOException e) { - e.printStackTrace(); - } - - DatalogProgram datalogProgram = new DatalogProgram(tbox, false); - importData(datalogProgram.getAdditionalDataFile()); - upperStore = new MultiStageQueryEngine("rl-upper", false); - upperStore.importRDFData("data", importedData.toString()); - GapByStore4ID gap = new GapByStore4ID(upperStore); - upperStore.materialiseFoldedly(datalogProgram, gap); - gap.clear(); - - factory = onto.getOWLOntologyManager().getOWLDataFactory(); - roller = new QueryRoller(factory); - - hermit = new Reasoner(onto); - return isConsistent(); - } - - @Override - public boolean isConsistent() { - if (toCheckSatisfiability) - return hermit.isConsistent(); - return true; - } - - @Override - public void evaluate(QueryRecord record) { - String[] disVars = record.getDistinguishedVariables(); - Set individuals = onto.getIndividualsInSignature(true); - if (disVars.length == 1) { - OWLClassExpression clsExp = roller.rollUp(record.getClause(), record.getAnswerVariables()[0]); - Set answers = new HashSet(); - for (OWLNamedIndividual individual: individuals) { - Utility.logDebug("checking ... 
" + individual); - if (hermit.isEntailed(factory.getOWLClassAssertionAxiom(clsExp, individual))) { - answers.add(new AnswerTuple(new Individual[] {Individual.create(individual.toStringID())})); - } - } - record.updateLowerBoundAnswers(new AnswerTuplesImp(record.getAnswerVariables(), answers)); - record.markAsProcessed(); - } - else { - // FIXME join here - record.markAsProcessed(); - } - } - - @Override - public void evaluateUpper(QueryRecord record) { - AnswerTuples rlAnswer = null; - try { - rlAnswer = upperStore.evaluate(record.getQueryText(), record.getAnswerVariables()); - record.updateUpperBoundAnswers(rlAnswer, true); - } finally { - if (rlAnswer != null) rlAnswer.dispose(); - } - } - - @Override - public void dispose() { - if (importedOntologyPath != null) { - File tmp = new File(importedOntologyPath); - if (tmp.exists()) tmp.delete(); - } - super.dispose(); - } + + Reasoner hermit; + + BasicQueryEngine upperStore = null; + + OWLOntology onto; + OWLDataFactory factory; + + String importedOntologyPath = null; + + QueryRoller roller; + boolean toCheckSatisfiability; + + public HermiTReasoner(boolean toCheckSatisfiability) { + this.toCheckSatisfiability = toCheckSatisfiability; + } + + @Override + public void loadOntology(OWLOntology ontology) { + if(isDisposed()) throw new DisposedException(); + onto = ontology; + } + + @Override + public boolean preprocess() { + if(isDisposed()) throw new DisposedException(); + OWLOntology tbox = onto; + try { + onto = OWLHelper.getImportedOntology(tbox, importedData.toString().split(ImportDataFileSeparator)); + importedOntologyPath = OWLHelper.getOntologyPath(onto); + } catch(OWLOntologyCreationException | OWLOntologyStorageException | IOException e) { + e.printStackTrace(); + } + + DatalogProgram datalogProgram = new DatalogProgram(tbox, false); + importData(datalogProgram.getAdditionalDataFile()); + upperStore = new MultiStageQueryEngine("rl-upper", false); + upperStore.importRDFData("data", importedData.toString()); + GapByStore4ID gap = new GapByStore4ID(upperStore); + upperStore.materialiseFoldedly(datalogProgram, gap); + gap.clear(); + + factory = onto.getOWLOntologyManager().getOWLDataFactory(); + roller = new QueryRoller(factory); + + hermit = new Reasoner(onto); + return isConsistent(); + } + + @Override + public boolean isConsistent() { + if(isDisposed()) throw new DisposedException(); + if(toCheckSatisfiability) + return hermit.isConsistent(); + return true; + } + + @Override + public void evaluate(QueryRecord record) { + if(isDisposed()) throw new DisposedException(); + String[] disVars = record.getDistinguishedVariables(); + Set individuals = onto.getIndividualsInSignature(true); + if(disVars.length == 1) { + OWLClassExpression clsExp = roller.rollUp(record.getClause(), record.getAnswerVariables()[0]); + Set answers = new HashSet(); + for(OWLNamedIndividual individual : individuals) { + Utility.logDebug("checking ... 
" + individual); + if(hermit.isEntailed(factory.getOWLClassAssertionAxiom(clsExp, individual))) { + answers.add(new AnswerTuple(new Individual[]{Individual.create(individual.toStringID())})); + } + } + record.updateLowerBoundAnswers(new AnswerTuplesImp(record.getAnswerVariables(), answers)); + record.markAsProcessed(); + } + else { + // FIXME join here + record.markAsProcessed(); + } + } + + @Override + public void evaluateUpper(QueryRecord record) { + if(isDisposed()) throw new DisposedException(); + AnswerTuples rlAnswer = null; + try { + rlAnswer = upperStore.evaluate(record.getQueryText(), record.getAnswerVariables()); + record.updateUpperBoundAnswers(rlAnswer, true); + } finally { + if(rlAnswer != null) rlAnswer.dispose(); + } + } + + @Override + public void dispose() { + super.dispose(); + if(importedOntologyPath != null) { + File tmp = new File(importedOntologyPath); + if(tmp.exists()) tmp.delete(); + } + } } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java index 618fb70..8445713 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java @@ -21,362 +21,364 @@ import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderDisjVar1; import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderWithGap; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; import uk.ac.ox.cs.pagoda.util.tuples.Tuple; import java.util.Collection; class MyQueryReasoner extends QueryReasoner { - OWLOntology ontology; - DatalogProgram program; - -// String additonalDataFile; - BasicQueryEngine rlLowerStore = null; - BasicQueryEngine lazyUpperStore = null; - // BasicQueryEngine limitedSkolemUpperStore; - OWLOntology elho_ontology; -// boolean[] namedIndividuals_lazyUpper; - KarmaQueryEngine elLowerStore = null; - BasicQueryEngine trackingStore = null; - // boolean[] namedIndividuals_tracking; - TrackingRuleEncoder encoder; - private boolean equalityTag; - private boolean multiStageTag; - private Timer t = new Timer(); - private Collection predicatesWithGap = null; - private SatisfiabilityStatus satisfiable; - private ConsistencyManager consistency = new ConsistencyManager(this); - private boolean useUpperStores = false; - public MyQueryReasoner() { - setup(true, true); - } - public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) { - setup(multiStageTag, considerEqualities); - } - - private BasicQueryEngine getUpperStore(String name, boolean checkValidity) { - if (multiStageTag) - return new MultiStageQueryEngine(name, checkValidity); -// return new TwoStageQueryEngine(name, checkValidity); - else - return new BasicQueryEngine(name); - } - - public void setup(boolean multiStageTag, boolean considerEqualities) { - satisfiable = SatisfiabilityStatus.UNCHECKED; - this.multiStageTag = multiStageTag; - this.equalityTag = considerEqualities; - - rlLowerStore = new BasicQueryEngine("rl-lower-bound"); - elLowerStore = new KarmaQueryEngine("elho-lower-bound"); - - trackingStore = getUpperStore("tracking", false); - } - - protected void internal_importDataFile(String name, String datafile) { -// addDataFile(datafile); - rlLowerStore.importRDFData(name, datafile); - if (lazyUpperStore != null) - lazyUpperStore.importRDFData(name, datafile); - elLowerStore.importRDFData(name, datafile); - trackingStore.importRDFData(name, datafile); - } - - @Override - public void loadOntology(OWLOntology o) { - 
if(!equalityTag) { - EqualitiesEliminator eliminator = new EqualitiesEliminator(o); - o = eliminator.getOutputOntology(); - eliminator.save(); - } - - ontology = o; - program = new DatalogProgram(ontology, properties.getToClassify()); + OWLOntology ontology; + DatalogProgram program; + + // String additonalDataFile; + BasicQueryEngine rlLowerStore = null; + BasicQueryEngine lazyUpperStore = null; + MultiStageQueryEngine limitedSkolemUpperStore; + OWLOntology elho_ontology; + // boolean[] namedIndividuals_lazyUpper; + KarmaQueryEngine elLowerStore = null; + BasicQueryEngine trackingStore = null; + // boolean[] namedIndividuals_tracking; + TrackingRuleEncoder encoder; + private boolean equalityTag; + private boolean multiStageTag; + private Timer t = new Timer(); + private Collection predicatesWithGap = null; + private SatisfiabilityStatus satisfiable; + private ConsistencyManager consistency = new ConsistencyManager(this); + private boolean useUpperStores = false; + + public MyQueryReasoner() { + setup(true, true); + } + + public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) { + setup(multiStageTag, considerEqualities); + } + + public void setup(boolean multiStageTag, boolean considerEqualities) { + if(isDisposed()) throw new DisposedException(); + satisfiable = SatisfiabilityStatus.UNCHECKED; + this.multiStageTag = multiStageTag; + this.equalityTag = considerEqualities; + + rlLowerStore = new BasicQueryEngine("rl-lower-bound"); + elLowerStore = new KarmaQueryEngine("elho-lower-bound"); + + trackingStore = getUpperStore("tracking", false); + } + + @Override + public void loadOntology(OWLOntology o) { + if(isDisposed()) throw new DisposedException(); + if(!equalityTag) { + EqualitiesEliminator eliminator = new EqualitiesEliminator(o); + o = eliminator.getOutputOntology(); + eliminator.save(); + } + + ontology = o; + program = new DatalogProgram(ontology, properties.getToClassify()); // program.getLower().save(); // program.getUpper().save(); // program.getGeneral().save(); - useUpperStores = multiStageTag && !program.getGeneral().isHorn(); - if(useUpperStores) { - lazyUpperStore = getUpperStore("lazy-upper-bound", true); -// limitedSkolemUpperStore = getUpperStore("limited-skolem-upper-bound", true); - } + useUpperStores = multiStageTag && !program.getGeneral().isHorn(); + if(useUpperStores) { + lazyUpperStore = getUpperStore("lazy-upper-bound", true); + limitedSkolemUpperStore = new MultiStageQueryEngine("limited-skolem-upper-bound", true); + } - importData(program.getAdditionalDataFile()); + importData(program.getAdditionalDataFile()); - elho_ontology = new ELHOProfile().getFragment(ontology); - elLowerStore.processOntology(elho_ontology); - } + elho_ontology = new ELHOProfile().getFragment(ontology); + elLowerStore.processOntology(elho_ontology); + } - public Collection getPredicatesWithGap() { - return predicatesWithGap; - } + public Collection getPredicatesWithGap() { + if(isDisposed()) throw new DisposedException(); + return predicatesWithGap; + } - @Override - public boolean preprocess() { - t.reset(); - Utility.logInfo("Preprocessing... checking satisfiability... "); + @Override + public boolean preprocess() { + if(isDisposed()) throw new DisposedException(); + t.reset(); + Utility.logInfo("Preprocessing... checking satisfiability... 
"); - String name = "data", datafile = importedData.toString(); - rlLowerStore.importRDFData(name, datafile); - rlLowerStore.materialise("lower program", program.getLower().toString()); + String name = "data", datafile = importedData.toString(); + rlLowerStore.importRDFData(name, datafile); + rlLowerStore.materialise("lower program", program.getLower().toString()); // program.getLower().save(); - if(!consistency.checkRLLowerBound()) return false; - Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber()); - - String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); - - elLowerStore.importRDFData(name, datafile); - elLowerStore.materialise("saturate named individuals", originalMarkProgram); - elLowerStore.materialise("lower program", program.getLower().toString()); - elLowerStore.initialiseKarma(); - if(!consistency.checkELLowerBound()) return false; - - if(lazyUpperStore != null) { - lazyUpperStore.importRDFData(name, datafile); - lazyUpperStore.materialise("saturate named individuals", originalMarkProgram); - int tag = lazyUpperStore.materialiseRestrictedly(program, null); - if(tag != 1) { - lazyUpperStore.dispose(); - lazyUpperStore = null; - } - if(tag == -1) return false; - } - if(consistency.checkUpper(lazyUpperStore)) { - satisfiable = SatisfiabilityStatus.SATISFIABLE; - Utility.logInfo("time for satisfiability checking: " + t.duration()); - } - -// if(limitedSkolemUpperStore != null) { -// limitedSkolemUpperStore.importRDFData(name, datafile); -// limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram); -// int tag = limitedSkolemUpperStore.materialiseSkolemly(program, null); -// if(tag != 1) { -// limitedSkolemUpperStore.dispose(); -// limitedSkolemUpperStore = null; -// } -// if(tag == -1) return false; -// } -// if(satisfiable == SatisfiabilityStatus.UNCHECKED && consistency.checkUpper(limitedSkolemUpperStore)) { -// satisfiable = SatisfiabilityStatus.SATISFIABLE; -// Utility.logInfo("time for satisfiability checking: " + t.duration()); -// } - - trackingStore.importRDFData(name, datafile); - trackingStore.materialise("saturate named individuals", originalMarkProgram); + if(!consistency.checkRLLowerBound()) return false; + Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber()); + + String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); + + elLowerStore.importRDFData(name, datafile); + elLowerStore.materialise("saturate named individuals", originalMarkProgram); + elLowerStore.materialise("lower program", program.getLower().toString()); + elLowerStore.initialiseKarma(); + if(!consistency.checkELLowerBound()) return false; + + if(lazyUpperStore != null) { + lazyUpperStore.importRDFData(name, datafile); + lazyUpperStore.materialise("saturate named individuals", originalMarkProgram); + int tag = lazyUpperStore.materialiseRestrictedly(program, null); + if(tag != 1) { + lazyUpperStore.dispose(); + lazyUpperStore = null; + } + if(tag == -1) return false; + } + if(consistency.checkUpper(lazyUpperStore)) { + satisfiable = SatisfiabilityStatus.SATISFIABLE; + Utility.logInfo("time for satisfiability checking: " + t.duration()); + } + + if(limitedSkolemUpperStore != null) { + limitedSkolemUpperStore.importRDFData(name, datafile); + limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram); + int tag = limitedSkolemUpperStore.materialiseSkolemly(program, null); + if(tag != 1) { + 
limitedSkolemUpperStore.dispose(); + limitedSkolemUpperStore = null; + } + if(tag == -1) return false; + } + if(satisfiable == SatisfiabilityStatus.UNCHECKED && consistency.checkUpper(limitedSkolemUpperStore)) { + satisfiable = SatisfiabilityStatus.SATISFIABLE; + Utility.logInfo("time for satisfiability checking: " + t.duration()); + } + + trackingStore.importRDFData(name, datafile); + trackingStore.materialise("saturate named individuals", originalMarkProgram); // materialiseFullUpper(); -// GapByStore4ID gap = new GapByStore4ID(trackingStore); - GapByStore4ID gap = new GapByStore4ID2(trackingStore, rlLowerStore); - trackingStore.materialiseFoldedly(program, gap); - predicatesWithGap = gap.getPredicatesWithGap(); - gap.clear(); - - if(program.getGeneral().isHorn()) - encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore); - else - encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore); +// GapByStore4ID gap = new GapByStore4ID(trackingStore); + GapByStore4ID gap = new GapByStore4ID2(trackingStore, rlLowerStore); + trackingStore.materialiseFoldedly(program, gap); + predicatesWithGap = gap.getPredicatesWithGap(); + gap.clear(); + + if(program.getGeneral().isHorn()) + encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore); + else + encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore); // encoder = new TrackingRuleEncoderDisj1(program.getUpper(), trackingStore); // encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore); // encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore); - program.deleteABoxTurtleFile(); - - if(!isConsistent()) - return false; - - consistency.extractBottomFragment(); - consistency.dispose(); - - return true; - } - - @Override - public boolean isConsistent() { - if(satisfiable == SatisfiabilityStatus.UNCHECKED) { - satisfiable = consistency.check() ? SatisfiabilityStatus.SATISFIABLE : SatisfiabilityStatus.UNSATISFIABLE; - Utility.logInfo("time for satisfiability checking: " + t.duration()); - } - return satisfiable == SatisfiabilityStatus.SATISFIABLE; - } - - /** - * It deals with blanks nodes differently from variables - * according to SPARQL semantics for OWL2 Entailment Regime. - *
- * In particular variables are matched only against named individuals, - * and blank nodes against named and anonymous individuals. - */ - private boolean queryUpperStore(BasicQueryEngine upperStore, QueryRecord queryRecord, - Tuple extendedQuery, Step step) { - - if(queryRecord.hasNonAnsDistinguishedVariables()) - queryUpperBound(upperStore, queryRecord, extendedQuery.get(0), queryRecord.getAnswerVariables()); - else - queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); - - queryRecord.addProcessingTime(step, t.duration()); - if(queryRecord.isProcessed()) { - queryRecord.setDifficulty(step); - return true; - } - return false; - } - - /** - * Returns the part of the ontology relevant for Hermit, while computing the bound answers. - * */ - private boolean queryBounds(QueryRecord queryRecord) { - AnswerTuples rlAnswer = null, elAnswer = null; - - t.reset(); - try { - rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); - Utility.logDebug(t.duration()); - queryRecord.updateLowerBoundAnswers(rlAnswer); - } finally { - if (rlAnswer != null) rlAnswer.dispose(); - } - queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration()); - - t.reset(); - - Tuple extendedQueryTexts = queryRecord.getExtendedQueryText(); - - Utility.logDebug("Tracking store"); - if(queryUpperStore(trackingStore, queryRecord, extendedQueryTexts, Step.SIMPLE_UPPER_BOUND)) - return true; - - if(!queryRecord.isBottom()) { - Utility.logDebug("Lazy store"); - if(lazyUpperStore != null && queryUpperStore(lazyUpperStore, queryRecord, extendedQueryTexts, Step.LAZY_UPPER_BOUND)) - return true; + if(!isConsistent()) + return false; + + consistency.extractBottomFragment(); + consistency.dispose(); + + program.dispose(); + + return true; + } + + @Override + public boolean isConsistent() { + if(isDisposed()) throw new DisposedException(); + if(satisfiable == SatisfiabilityStatus.UNCHECKED) { + satisfiable = consistency.check() ? SatisfiabilityStatus.SATISFIABLE : SatisfiabilityStatus.UNSATISFIABLE; + Utility.logInfo("time for satisfiability checking: " + t.duration()); + } + return satisfiable == SatisfiabilityStatus.SATISFIABLE; + } + + @Override + public void evaluate(QueryRecord queryRecord) { + if(isDisposed()) throw new DisposedException(); + if(queryBounds(queryRecord)) + return; + + OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord); + + int aBoxCount = relevantOntologySubset.getABoxAxioms(true).size(); + Utility.logInfo("Relevant ontology subset: ABox_axioms=" + aBoxCount + " TBox_axioms=" + (relevantOntologySubset + .getAxiomCount() - aBoxCount)); +// queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); + + if(querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) + return; + + Timer t = new Timer(); + Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT()); + summarisedChecker.check(queryRecord.getGapAnswers()); + summarisedChecker.dispose(); + Utility.logDebug("Total time for full reasoner: " + t.duration()); + queryRecord.markAsProcessed(); + Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty()); + } + + @Override + public void evaluateUpper(QueryRecord queryRecord) { + if(isDisposed()) throw new DisposedException(); + // TODO add new upper store + AnswerTuples rlAnswer = null; + boolean useFull = queryRecord.isBottom() || lazyUpperStore == null; + try { + rlAnswer = + (useFull ? 
trackingStore : lazyUpperStore).evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); + queryRecord.updateUpperBoundAnswers(rlAnswer, true); + } finally { + if(rlAnswer != null) rlAnswer.dispose(); + } + } + + @Override + public void dispose() { + super.dispose(); + + if(encoder != null) encoder.dispose(); + if(rlLowerStore != null) rlLowerStore.dispose(); + if(lazyUpperStore != null) lazyUpperStore.dispose(); + if(elLowerStore != null) elLowerStore.dispose(); + if(trackingStore != null) trackingStore.dispose(); + if(limitedSkolemUpperStore != null) limitedSkolemUpperStore.dispose(); + + } + + private BasicQueryEngine getUpperStore(String name, boolean checkValidity) { + if(multiStageTag) + return new MultiStageQueryEngine(name, checkValidity); +// return new TwoStageQueryEngine(name, checkValidity); + else + return new BasicQueryEngine(name); + } + + protected void internal_importDataFile(String name, String datafile) { +// addDataFile(datafile); + rlLowerStore.importRDFData(name, datafile); + if(lazyUpperStore != null) + lazyUpperStore.importRDFData(name, datafile); + elLowerStore.importRDFData(name, datafile); + trackingStore.importRDFData(name, datafile); + } + + /** + * It deals with blanks nodes differently from variables + * according to SPARQL semantics for OWL2 Entailment Regime. + *
+ * In particular variables are matched only against named individuals, + * and blank nodes against named and anonymous individuals. + */ + private boolean queryUpperStore(BasicQueryEngine upperStore, QueryRecord queryRecord, + Tuple extendedQuery, Step step) { + + if(queryRecord.hasNonAnsDistinguishedVariables()) + queryUpperBound(upperStore, queryRecord, extendedQuery.get(0), queryRecord.getAnswerVariables()); + else + queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); + + queryRecord.addProcessingTime(step, t.duration()); + if(queryRecord.isProcessed()) { + queryRecord.setDifficulty(step); + return true; + } + return false; + } + + /** + * Returns the part of the ontology relevant for Hermit, while computing the bound answers. + */ + private boolean queryBounds(QueryRecord queryRecord) { + AnswerTuples rlAnswer = null, elAnswer = null; + + t.reset(); + try { + rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); + Utility.logDebug(t.duration()); + queryRecord.updateLowerBoundAnswers(rlAnswer); + } finally { + if(rlAnswer != null) rlAnswer.dispose(); + } + queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration()); + + t.reset(); + + Tuple extendedQueryTexts = queryRecord.getExtendedQueryText(); + + Utility.logDebug("Tracking store"); + if(queryUpperStore(trackingStore, queryRecord, extendedQueryTexts, Step.SIMPLE_UPPER_BOUND)) + return true; + + if(!queryRecord.isBottom()) { + Utility.logDebug("Lazy store"); + if(lazyUpperStore != null && queryUpperStore(lazyUpperStore, queryRecord, extendedQueryTexts, Step.LAZY_UPPER_BOUND)) + return true; // Utility.logDebug("Skolem store"); // if(limitedSkolemUpperStore != null && queryUpperStore(limitedSkolemUpperStore, queryRecord, extendedQueryTexts, Step.L_SKOLEM_UPPER_BOUND)) // return null; - } - - t.reset(); - try { - elAnswer = elLowerStore.evaluate(extendedQueryTexts.get(0), - queryRecord.getAnswerVariables(), - queryRecord.getLowerBoundAnswers()); - Utility.logDebug(t.duration()); - queryRecord.updateLowerBoundAnswers(elAnswer); - } finally { - if (elAnswer != null) elAnswer.dispose(); - } - queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); - - if(queryRecord.isProcessed()) { - queryRecord.setDifficulty(Step.EL_LOWER_BOUND); - return true; - } - - return false; - } - - private OWLOntology extractRelevantOntologySubset(QueryRecord queryRecord) { - t.reset(); - - QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord); - OWLOntology relevantOntologySubset = tracker.extract(trackingStore, consistency.getQueryRecords(), true); - - queryRecord.addProcessingTime(Step.FRAGMENT, t.duration()); - - return relevantOntologySubset; - } - - private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) { - AnswerTuples rlAnswer = null; - try { - Utility.logDebug(queryText); - rlAnswer = upperStore.evaluate(queryText, answerVariables); - Utility.logDebug(t.duration()); - queryRecord.updateUpperBoundAnswers(rlAnswer); - } finally { - if(rlAnswer != null) rlAnswer.dispose(); - } - } - - @Override - public void evaluate(QueryRecord queryRecord) { - if(queryBounds(queryRecord)) - return; - - OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord); - - int aBoxCount = relevantOntologySubset.getABoxAxioms(true).size(); - Utility.logInfo("Relevant ontology subset: ABox_axioms=" + aBoxCount + " TBox_axioms=" + (relevantOntologySubset - 
.getAxiomCount() - aBoxCount)); -// queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); - - if(querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) - return; - - Timer t = new Timer(); - Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT()); - summarisedChecker.check(queryRecord.getGapAnswers()); - summarisedChecker.dispose(); - Utility.logDebug("Total time for full reasoner: " + t.duration()); - queryRecord.markAsProcessed(); - Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty()); - } - - private boolean querySkolemisedRelevantSubset(OWLOntology relevantSubset, QueryRecord queryRecord) { - MultiStageQueryEngine relevantStore = - new MultiStageQueryEngine("Relevant-store", true); // checkValidity is true - DatalogProgram relevantProgram = new DatalogProgram(relevantSubset, false); // toClassify is false - -// relevantStore.importRDFData("data", importedData.toString()); // 2 answers more - relevantStore.importDataFromABoxOf(relevantSubset); - - int materialisationResult = relevantStore.materialiseSkolemly(relevantProgram, null); - if(materialisationResult != 1) - throw new RuntimeException("Skolemised materialisation error"); // TODO check consistency + } + + t.reset(); + try { + elAnswer = elLowerStore.evaluate(extendedQueryTexts.get(0), + queryRecord.getAnswerVariables(), + queryRecord.getLowerBoundAnswers()); + Utility.logDebug(t.duration()); + queryRecord.updateLowerBoundAnswers(elAnswer); + } finally { + if(elAnswer != null) elAnswer.dispose(); + } + queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration()); + + if(queryRecord.isProcessed()) { + queryRecord.setDifficulty(Step.EL_LOWER_BOUND); + return true; + } + + return false; + } + + private OWLOntology extractRelevantOntologySubset(QueryRecord queryRecord) { + t.reset(); + + QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord); + OWLOntology relevantOntologySubset = tracker.extract(trackingStore, consistency.getQueryRecords(), true); + + queryRecord.addProcessingTime(Step.FRAGMENT, t.duration()); + + return relevantOntologySubset; + } + + private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) { + AnswerTuples rlAnswer = null; + try { + Utility.logDebug(queryText); + rlAnswer = upperStore.evaluate(queryText, answerVariables); + Utility.logDebug(t.duration()); + queryRecord.updateUpperBoundAnswers(rlAnswer); + } finally { + if(rlAnswer != null) rlAnswer.dispose(); + } + } + + private boolean querySkolemisedRelevantSubset(OWLOntology relevantSubset, QueryRecord queryRecord) { + DatalogProgram relevantProgram = new DatalogProgram(relevantSubset, false); // toClassify is false + + MultiStageQueryEngine relevantStore = + new MultiStageQueryEngine("Relevant-store", true); // checkValidity is true +// relevantStore.importRDFData("data", relevantProgram.getAdditionalDataFile()); // tried, doesn't work + relevantStore.importDataFromABoxOf(relevantSubset); + + int materialisationResult = relevantStore.materialiseSkolemly(relevantProgram, null); +// int materialisationResult = relevantStore.materialiseRestrictedly(relevantProgram, null); // DOESN'T WORK!!! 
+ if(materialisationResult != 1) + throw new RuntimeException("Skolemised materialisation error"); // TODO check consistency // relevantStore.materialiseRestrictedly(relevantProgram, null); // it has been tried - return queryUpperStore(relevantStore, queryRecord, queryRecord.getExtendedQueryText(), Step.L_SKOLEM_UPPER_BOUND); - - // the following has been tried -// Tuple extendedQueryText = queryRecord.getExtendedQueryText(); -// if(queryRecord.hasNonAnsDistinguishedVariables()) { -// queryUpperBound(relevantStore, queryRecord, extendedQueryText.get(0), queryRecord.getAnswerVariables()); -// queryUpperBound(relevantStore, queryRecord, extendedQueryText.get(1), queryRecord.getDistinguishedVariables()); -// } -// else -// queryUpperBound(relevantStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); -// -// return queryRecord.isProcessed(); - - } - - @Override - public void evaluateUpper(QueryRecord queryRecord) { - // TODO add new upper store - AnswerTuples rlAnswer = null; - boolean useFull = queryRecord.isBottom() || lazyUpperStore == null; - try { - rlAnswer = - (useFull ? trackingStore : lazyUpperStore).evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); - queryRecord.updateUpperBoundAnswers(rlAnswer, true); - } finally { - if(rlAnswer != null) rlAnswer.dispose(); - } - } - - @Override - public void dispose() { - if (encoder != null) encoder.dispose(); - if (rlLowerStore != null) rlLowerStore.dispose(); - if (lazyUpperStore != null) lazyUpperStore.dispose(); - if (elLowerStore != null) elLowerStore.dispose(); - if (trackingStore != null) trackingStore.dispose(); - -// if(limitedSkolemUpperStore != null) limitedSkolemUpperStore.dispose(); - super.dispose(); - } - - enum SatisfiabilityStatus {SATISFIABLE, UNSATISFIABLE, UNCHECKED} + return queryUpperStore(relevantStore, queryRecord, queryRecord.getExtendedQueryText(), Step.L_SKOLEM_UPPER_BOUND); + +// return queryUpperStore(limitedSkolemUpperStore, queryRecord, queryRecord.getExtendedQueryText(), Step.L_SKOLEM_UPPER_BOUND); + } + + enum SatisfiabilityStatus {SATISFIABLE, UNSATISFIABLE, UNCHECKED} } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java index b9abf07..3200216 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java @@ -1,17 +1,15 @@ package uk.ac.ox.cs.pagoda.reasoner; +import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.util.disposable.Disposable; + import java.util.Collection; -import uk.ac.ox.cs.pagoda.query.AnswerTuples; +public abstract class QueryEngine extends Disposable { + + public abstract void evaluate(Collection queryTexts, String answerFile); -public interface QueryEngine { + public abstract AnswerTuples evaluate(String queryText); - public void evaluate(Collection queryTexts, String answerFile); - - public AnswerTuples evaluate(String queryText); - - public AnswerTuples evaluate(String queryText, String[] answerVariables); - - public void dispose(); - + public abstract AnswerTuples evaluate(String queryText, String[] answerVariables); } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java index 118c1b2..962a78f 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java @@ -9,6 +9,8 @@ import uk.ac.ox.cs.pagoda.query.QueryRecord; import uk.ac.ox.cs.pagoda.util.PagodaProperties; import 
uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.pagoda.util.disposable.Disposable; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; import java.io.BufferedWriter; import java.io.File; @@ -18,230 +20,242 @@ import java.nio.file.Paths; import java.util.Collection; // TODO clean APIs -public abstract class QueryReasoner { - - public static final String ImportDataFileSeparator = ";"; - private static final boolean DEFAULT_MULTI_STAGES = true; - private static final boolean DEFAULT_EQUALITIES = true; - public boolean fullReasoner = this instanceof MyQueryReasoner; - protected StringBuilder importedData = new StringBuilder(); -// protected boolean forSemFacet = false; -PagodaProperties properties; - BufferedWriter answerWriter = null; - private QueryManager m_queryManager = new QueryManager(); - - public static QueryReasoner getInstance(PagodaProperties p) { - OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath()); - QueryReasoner pagoda = getInstance(ontology, p); - pagoda.properties = p; - pagoda.loadOntology(ontology); - pagoda.importData(p.getDataPath()); - if (pagoda.preprocess()) { - Utility.logInfo("The ontology is consistent!"); - return pagoda; - } - else { - System.out.println("The ontology is inconsistent!"); - pagoda.dispose(); - return null; - } - } - - public static QueryReasoner getInstance(OWLOntology o) { - QueryReasoner pagoda = getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES); - pagoda.properties = new PagodaProperties(); - return pagoda; - } - - private static QueryReasoner getInstance(OWLOntology o, PagodaProperties p) { - return getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES); - } - - public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) { +public abstract class QueryReasoner extends Disposable { + + public static final String ImportDataFileSeparator = ";"; + private static final boolean DEFAULT_MULTI_STAGES = true; + private static final boolean DEFAULT_EQUALITIES = true; + public boolean fullReasoner = this instanceof MyQueryReasoner; + protected StringBuilder importedData = new StringBuilder(); + // protected boolean forSemFacet = false; + PagodaProperties properties; + BufferedWriter answerWriter = null; + private QueryManager m_queryManager = new QueryManager(); + + public static QueryReasoner getInstance(PagodaProperties p) { + OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath()); + QueryReasoner pagoda = getInstance(ontology, p); + pagoda.properties = p; + pagoda.loadOntology(ontology); + pagoda.importData(p.getDataPath()); + if(pagoda.preprocess()) { + Utility.logInfo("The ontology is consistent!"); + return pagoda; + } + else { + System.out.println("The ontology is inconsistent!"); + pagoda.dispose(); + return null; + } + } + + public static QueryReasoner getInstance(OWLOntology o) { + QueryReasoner pagoda = getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES); + pagoda.properties = new PagodaProperties(); + return pagoda; + } + + private static QueryReasoner getInstance(OWLOntology o, PagodaProperties p) { + return getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES); + } + + public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) { // Utility.initialise(); - QueryReasoner reasoner; - if (OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner(); - else if (OWLHelper.isInELHO(o)) 
reasoner = new ELHOQueryReasoner(); - else - switch (type) { - case RLU: - reasoner = new RLUQueryReasoner(performMultiStages, considerEqualities); - break; - case ELHOU: - reasoner = new ELHOUQueryReasoner(performMultiStages, considerEqualities); - break; - default: - reasoner = new MyQueryReasoner(performMultiStages, considerEqualities); - } - return reasoner; - } - - public static QueryReasoner getHermiTReasoner(boolean toCheckSatisfiability) { - return new HermiTReasoner(toCheckSatisfiability); - } - - public void setToClassify(boolean flag) { - properties.setToClassify(flag); - } - - public void setToCallHermiT(boolean flag) { - properties.setToCallHermiT(flag); - } - - public void importData(String datafile) { - if (datafile != null && !datafile.equalsIgnoreCase("null")) - importData(datafile.split(ImportDataFileSeparator)); - } - - public void importData(String[] datafiles) { - if (datafiles != null) { - for (String datafile: datafiles) { - File file = new File(datafile); - if (file.exists()) { - if (file.isFile()) importDataFile(file); - else importDataDirectory(file); - } - else { - Utility.logError("warning: file " + datafile + " doesn't exists."); - } - } - } - } - - private void importDataDirectory(File file) { - for (File child: file.listFiles()) - if (child.isFile()) importDataFile(child); - else importDataDirectory(child); - } - - private void importDataFile(File file) { - String datafile; - try { - datafile = file.getCanonicalPath(); - } catch (IOException e) { - e.printStackTrace(); - return; - } - importDataFile(datafile); - } - - protected final void importDataFile(String datafile) { - if (importedData.length() == 0) - importedData.append(datafile); - else - importedData.append(ImportDataFileSeparator).append(datafile); - - } - - public abstract void loadOntology(OWLOntology ontology); - - public abstract boolean preprocess(); - - public abstract boolean isConsistent(); - - public abstract void evaluate(QueryRecord record); - - public abstract void evaluateUpper(QueryRecord record); - - public AnswerTuples evaluate(String queryText, boolean forFacetGeneration) { - if (forFacetGeneration) { - QueryRecord record = m_queryManager.create(queryText); - Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText); - if(!record.isProcessed()) - evaluateUpper(record); + QueryReasoner reasoner; + if(OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner(); + else if(OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner(); + else + switch(type) { + case RLU: + reasoner = new RLUQueryReasoner(performMultiStages, considerEqualities); + break; + case ELHOU: + reasoner = new ELHOUQueryReasoner(performMultiStages, considerEqualities); + break; + default: + reasoner = new MyQueryReasoner(performMultiStages, considerEqualities); + } + return reasoner; + } + + public static QueryReasoner getHermiTReasoner(boolean toCheckSatisfiability) { + return new HermiTReasoner(toCheckSatisfiability); + } + + public void setToClassify(boolean flag) { + if(isDisposed()) throw new DisposedException(); + properties.setToClassify(flag); + } + + public void setToCallHermiT(boolean flag) { + if(isDisposed()) throw new DisposedException(); + properties.setToCallHermiT(flag); + } + + public void importData(String datafile) { + if(isDisposed()) throw new DisposedException(); + if(datafile != null && !datafile.equalsIgnoreCase("null")) + importData(datafile.split(ImportDataFileSeparator)); + } + + public void importData(String[] datafiles) { + 
if(isDisposed()) throw new DisposedException(); + if(datafiles != null) { + for(String datafile : datafiles) { + File file = new File(datafile); + if(file.exists()) { + if(file.isFile()) importDataFile(file); + else importDataDirectory(file); + } + else { + Utility.logError("warning: file " + datafile + " doesn't exist."); + } + } + } + } + + public abstract void loadOntology(OWLOntology ontology); + + public abstract boolean preprocess(); + + public abstract boolean isConsistent(); + + public abstract void evaluate(QueryRecord record); + + public abstract void evaluateUpper(QueryRecord record); + + public AnswerTuples evaluate(String queryText, boolean forFacetGeneration) { + if(isDisposed()) throw new DisposedException(); + if(forFacetGeneration) { + QueryRecord record = m_queryManager.create(queryText); + Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText); + if(!record.isProcessed()) + evaluateUpper(record); // AnswerTuples tuples = record.getUpperBoundAnswers(); // for (AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) { // tuple = tuples.getTuple(); // if (tuple.toString().contains("NC")) // System.out.println(tuple.toString()); // } - return record.getUpperBoundAnswers(); - } else - return evaluate(queryText); - } - -// public void evaluate(Collection queryRecords) { -// evaluate(queryRecords); -// } + return record.getUpperBoundAnswers(); + } + else + return evaluate(queryText); + } + + public AnswerTuples evaluate(String queryText) { + if(isDisposed()) throw new DisposedException(); + QueryRecord record = m_queryManager.create(queryText); + Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); + if(!record.isProcessed()) + evaluate(record); + AnswerTuples answer = record.getAnswers(); + record.dispose(); + return answer; + + } + + public void evaluate_shell(String queryText) { + if(isDisposed()) throw new DisposedException(); + QueryRecord record = m_queryManager.create(queryText); + Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); + if(!record.isProcessed()) + evaluate(record); + Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple()); + record.dispose(); + + } - public AnswerTuples evaluate(String queryText) { - QueryRecord record = m_queryManager.create(queryText); - Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); - if(!record.isProcessed()) - evaluate(record); - AnswerTuples answer = record.getAnswers(); - record.dispose(); - return answer; - - } - - public void evaluate_shell(String queryText) { - QueryRecord record = m_queryManager.create(queryText); - Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); - if(!record.isProcessed()) - evaluate(record); - Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple()); - record.dispose(); - - } - - public void evaluate(Collection queryRecords) { - if (!isConsistent()) { - Utility.logDebug("The ontology and dataset is inconsistent."); - return; - } - - if(properties.getAnswerPath() != null && answerWriter == null) { - try { - answerWriter = Files.newBufferedWriter(Paths.get(properties.getAnswerPath())); - } catch (IOException e) { - Utility.logError("The answer path is not valid!"); - e.printStackTrace(); - } - } - - Timer t = new Timer(); - Gson gson = QueryRecord.GsonCreator.getInstance(); - for (QueryRecord 
record: queryRecords) { + public void evaluate(Collection queryRecords) { + if(isDisposed()) throw new DisposedException(); + if(!isConsistent()) { + Utility.logDebug("The ontology and dataset is inconsistent."); + return; + } + + if(properties.getAnswerPath() != null && answerWriter == null) { + try { + answerWriter = Files.newBufferedWriter(Paths.get(properties.getAnswerPath())); + } catch(IOException e) { + Utility.logError("The answer path is not valid!"); + e.printStackTrace(); + } + } + + Timer t = new Timer(); + Gson gson = QueryRecord.GsonCreator.getInstance(); + for(QueryRecord record : queryRecords) { // if (Integer.parseInt(record.getQueryID()) != 218) continue; - Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", - record.getQueryText()); - if(!record.isProcessed()) { - t.reset(); - if(!record.isProcessed()) - evaluate(record); - Utility.logInfo("Total time to answer this query: " + t.duration()); - if(!fullReasoner && !record.isProcessed()) { - Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds."); - continue; - } - } - record.outputAnswerStatistics(); - record.outputTimes(); - } - /* TODO it can handle one call only + Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", + record.getQueryText()); + if(!record.isProcessed()) { + t.reset(); + if(!record.isProcessed()) + evaluate(record); + Utility.logInfo("Total time to answer this query: " + t.duration()); + if(!fullReasoner && !record.isProcessed()) { + Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds."); + continue; + } + } + record.outputAnswerStatistics(); + record.outputTimes(); + } + /* TODO it can handle one call only if you call twice, you will end up with a json file with multiple roots */ - if(answerWriter != null) gson.toJson(queryRecords, answerWriter); + if(answerWriter != null) gson.toJson(queryRecords, answerWriter); // queryRecords.stream().forEach(record -> Utility.logDebug(gson.toJson(record))); - queryRecords.stream().forEach(record -> record.dispose()); - } - - public void dispose() { - if (answerWriter != null) { - try { - answerWriter.close(); - } catch (IOException e) { - e.printStackTrace(); - } - } + queryRecords.stream().forEach(record -> record.dispose()); + } + +// public void evaluate(Collection queryRecords) { +// evaluate(queryRecords); +// } + + @Override + public void dispose() { + super.dispose(); + if(answerWriter != null) { + try { + answerWriter.close(); + } catch(IOException e) { + e.printStackTrace(); + } + } // Utility.cleanup(); - } - - public QueryManager getQueryManager() { - return m_queryManager; - } + } + + public QueryManager getQueryManager() { + if(isDisposed()) throw new DisposedException(); + return m_queryManager; + } + + private void importDataDirectory(File file) { + for(File child : file.listFiles()) + if(child.isFile()) importDataFile(child); + else importDataDirectory(child); + } + + private void importDataFile(File file) { + String datafile; + try { + datafile = file.getCanonicalPath(); + } catch(IOException e) { + e.printStackTrace(); + return; + } + importDataFile(datafile); + } + + protected final void importDataFile(String datafile) { + if(importedData.length() == 0) + importedData.append(datafile); + else + importedData.append(ImportDataFileSeparator).append(datafile); + + } - public enum Type {Full, RLU, ELHOU} + public enum Type {Full, RLU, ELHOU} } diff --git 
a/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java index bea5bbf..16e2627 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java @@ -9,6 +9,7 @@ import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine; import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; import uk.ac.ox.cs.pagoda.util.Timer; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; class RLQueryReasoner extends QueryReasoner { @@ -16,13 +17,14 @@ class RLQueryReasoner extends QueryReasoner { LowerDatalogProgram program; Timer t = new Timer(); - + public RLQueryReasoner() { rlLowerStore = new BasicQueryEngine("rl"); } @Override public void evaluate(QueryRecord queryRecord) { + if(isDisposed()) throw new DisposedException(); AnswerTuples rlAnswer = null; t.reset(); try { @@ -38,12 +40,13 @@ class RLQueryReasoner extends QueryReasoner { @Override public void dispose() { - if (rlLowerStore != null) rlLowerStore.dispose(); super.dispose(); + if(rlLowerStore != null) rlLowerStore.dispose(); } @Override public void loadOntology(OWLOntology ontology) { + if(isDisposed()) throw new DisposedException(); program = new LowerDatalogProgram(); program.load(ontology, new UnaryBottom()); program.transform(); @@ -53,6 +56,7 @@ class RLQueryReasoner extends QueryReasoner { @Override public boolean preprocess() { + if(isDisposed()) throw new DisposedException(); rlLowerStore.importRDFData("data", importedData.toString()); rlLowerStore.materialise("lower program", program.toString()); @@ -61,6 +65,7 @@ class RLQueryReasoner extends QueryReasoner { @Override public boolean isConsistent() { + if(isDisposed()) throw new DisposedException(); AnswerTuples ans = null; try { ans = rlLowerStore.evaluate(QueryRecord.botQueryText, new String[] {"X"}); @@ -74,6 +79,7 @@ class RLQueryReasoner extends QueryReasoner { @Override public void evaluateUpper(QueryRecord record) { + if(isDisposed()) throw new DisposedException(); evaluate(record); } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java index 547140a..d0712e1 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java @@ -10,6 +10,7 @@ import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; import uk.ac.ox.cs.pagoda.rules.DatalogProgram; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; class RLUQueryReasoner extends QueryReasoner { @@ -19,7 +20,7 @@ class RLUQueryReasoner extends QueryReasoner { boolean multiStageTag, equalityTag; Timer t = new Timer(); - + public RLUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { this.multiStageTag = multiStageTag; this.equalityTag = considerEqualities; @@ -32,6 +33,7 @@ class RLUQueryReasoner extends QueryReasoner { @Override public void evaluate(QueryRecord queryRecord) { + if(isDisposed()) throw new DisposedException(); AnswerTuples ans = null; t.reset(); try { @@ -60,6 +62,7 @@ class RLUQueryReasoner extends QueryReasoner { @Override public void evaluateUpper(QueryRecord queryRecord) { + if(isDisposed()) throw new DisposedException(); AnswerTuples ans = null; try { ans = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); @@ -72,13 +75,14 @@ class RLUQueryReasoner extends QueryReasoner { 
@Override public void dispose() { + super.dispose(); if (rlLowerStore != null) rlLowerStore.dispose(); if (rlUpperStore != null) rlUpperStore.dispose(); - super.dispose(); } @Override public void loadOntology(OWLOntology o) { + if(isDisposed()) throw new DisposedException(); if (!equalityTag) { EqualitiesEliminator eliminator = new EqualitiesEliminator(o); o = eliminator.getOutputOntology(); @@ -92,6 +96,7 @@ class RLUQueryReasoner extends QueryReasoner { @Override public boolean preprocess() { + if(isDisposed()) throw new DisposedException(); String datafile = importedData.toString(); rlLowerStore.importRDFData("data", datafile); rlLowerStore.materialise("lower program", program.getLower().toString()); @@ -105,6 +110,7 @@ class RLUQueryReasoner extends QueryReasoner { @Override public boolean isConsistent() { + if(isDisposed()) throw new DisposedException(); String[] X = new String[] { "X" }; AnswerTuples ans = null; try { @@ -125,6 +131,5 @@ class RLUQueryReasoner extends QueryReasoner { Utility.logDebug("The consistency of the data has not been determined yet."); return true; } - - + } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java index 79be8aa..fe43e09 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java @@ -1,375 +1,377 @@ package uk.ac.ox.cs.pagoda.reasoner.light; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Set; - import org.semanticweb.HermiT.model.DLClause; - +import uk.ac.ox.cs.JRDFox.JRDFStoreException; +import uk.ac.ox.cs.JRDFox.store.DataStore; +import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType; +import uk.ac.ox.cs.JRDFox.store.Parameters; +import uk.ac.ox.cs.JRDFox.store.TripleStatus; +import uk.ac.ox.cs.JRDFox.store.TupleIterator; import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; import uk.ac.ox.cs.pagoda.query.AnswerTuples; import uk.ac.ox.cs.pagoda.query.GapByStore4ID; import uk.ac.ox.cs.pagoda.rules.DatalogProgram; import uk.ac.ox.cs.pagoda.rules.Program; -import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; -import uk.ac.ox.cs.pagoda.util.Namespace; +import uk.ac.ox.cs.pagoda.util.*; import uk.ac.ox.cs.pagoda.util.Timer; -import uk.ac.ox.cs.pagoda.util.UFS; -import uk.ac.ox.cs.pagoda.util.Utility; -import uk.ac.ox.cs.JRDFox.JRDFStoreException; -import uk.ac.ox.cs.JRDFox.store.DataStore; -import uk.ac.ox.cs.JRDFox.store.Parameters; -import uk.ac.ox.cs.JRDFox.store.TripleStatus; -import uk.ac.ox.cs.JRDFox.store.TupleIterator; -import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; + +import java.util.*; public class BasicQueryEngine extends RDFoxQueryEngine { - protected DataStore store; - protected Parameters parameters = new Parameters(); - - public BasicQueryEngine(String name) { - super(name); - store = RDFoxQueryEngine.createDataStore(); - parameters.m_allAnswersInRoot = true; - parameters.m_useBushy = true; - } - - public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) { - if (gap != null) { - materialise("lower program", dProgram.getLower().toString()); - String program = dProgram.getUpper().toString(); - try { - gap.compile(program); - gap.addBackTo(); - getDataStore().clearRulesAndMakeFactsExplicit(); - } catch (JRDFStoreException e) { - e.printStackTrace(); - } finally { - gap.clear(); - } - } - else - materialise("upper 
program", dProgram.getUpper().toString()); - } - - public int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap) { - if (gap != null) { - materialise("lower program", dProgram.getLower().toString()); - String program = dProgram.getUpper().toString(); - try { - gap.compile(program); - gap.addBackTo(); - getDataStore().clearRulesAndMakeFactsExplicit(); - } catch (JRDFStoreException e) { - e.printStackTrace(); - } finally { - gap.clear(); - } - } - else - materialise("upper program", dProgram.getUpper().toString()); - - return 1; - } - - @Override - public AnswerTuples evaluate(String queryText) { - return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0]); - } - - @Override - public AnswerTuples evaluate(String queryText, String[] answerVars) { - TupleIterator tupleIterator; - try { - tupleIterator = store.compileQuery(queryText.replace("_:", "?"), prefixes, parameters); - } catch (JRDFStoreException e) { - e.printStackTrace(); - return null; - } - return new RDFoxAnswerTuples(answerVars, tupleIterator); - } - - @Override - public DataStore getDataStore() { - return store; - } - - @Override - public void dispose() { - store.dispose(); - } - - protected void outputClassAssertions(String filename) { - TupleIterator allTuples = null; - boolean redirect = false; - try { - allTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Z }", prefixes, parameters); - redirect = Utility.redirectCurrentOut(filename); - for (long multi = allTuples.open(); multi != 0; multi = allTuples.getNext()) - System.out.println(RDFoxTripleManager.getQuotedTerm(allTuples.getResource(0)) + " " + RDFoxTripleManager.getQuotedTerm(allTuples.getResource(1))); - } catch (JRDFStoreException e) { - e.printStackTrace(); - } finally { - if (redirect) Utility.closeCurrentOut(); - if (allTuples != null) allTuples.dispose(); - } - } - - public void outputInstance4BinaryPredicate(String iri, String filename) { - Utility.redirectCurrentOut(filename); - outputInstance4BinaryPredicate(iri); - Utility.closeCurrentOut(); - } - - public void outputInstance4BinaryPredicate(String iri) { - outputAnswers("select ?x ?y where { ?x <" + iri + "> ?y . 
}"); - } - - public void outputInstanceNumbers(String filename) { - TupleIterator predicateTuples = null; - TupleIterator instanceTuples; - Set number = new HashSet(); - String predicate; - try { - predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Y }", prefixes, parameters); - for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { - predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); - instanceTuples = null; - try { - instanceTuples = getDataStore().compileQuery("SELECT ?X WHERE { ?X <" + Namespace.RDF_TYPE + "> " + predicate + " }", prefixes, parameters); - long totalCount = 0; - for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) { - totalCount += instanceTuples.getMultiplicity(); - } - number.add(predicate + " * " + totalCount); - } finally { - if (instanceTuples != null) instanceTuples.dispose(); - } - } - } catch (JRDFStoreException e) { - e.printStackTrace(); - } finally { - if (predicateTuples != null) predicateTuples.dispose(); - predicateTuples = null; - } - - try { - predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X ?Y ?Z }", prefixes, parameters); - for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { - predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); - instanceTuples = null; - try { - instanceTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X " + predicate + " ?Z }", prefixes, parameters); - ; - long totalCount = 0; - for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) - totalCount += instanceTuples.getMultiplicity(); - number.add(predicate + " * " + totalCount); - } finally { - if (instanceTuples != null) instanceTuples.dispose(); - } - } - - } catch (JRDFStoreException e) { - e.printStackTrace(); - } finally { - if (predicateTuples != null) predicateTuples.dispose(); - predicateTuples = null; - } - - Utility.redirectCurrentOut(filename); - String[] ordered = number.toArray(new String[0]); - Arrays.sort(ordered, new DLPredicateComparator()); - for (String line: ordered) System.out.println(line); - Utility.closeCurrentOut(); - - } - - public TupleIterator internal_evaluateAgainstIDBs(String queryText) throws JRDFStoreException { - TupleIterator iter = store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB); + protected DataStore store; + protected Parameters parameters = new Parameters(); + Set materialisedRules = new HashSet(); + private UFS equalityGroups = null; + + public BasicQueryEngine(String name) { + super(name); + store = RDFoxQueryEngine.createDataStore(); + parameters.m_allAnswersInRoot = true; + parameters.m_useBushy = true; + } + + public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) { + if(isDisposed()) throw new DisposedException(); + if(gap != null) { + materialise("lower program", dProgram.getLower().toString()); + String program = dProgram.getUpper().toString(); + try { + gap.compile(program); + gap.addBackTo(); + getDataStore().clearRulesAndMakeFactsExplicit(); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } finally { + gap.clear(); + } + } + else + materialise("upper program", dProgram.getUpper().toString()); + } + + public int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap) { + if(isDisposed()) throw new 
DisposedException(); + if(gap != null) { + materialise("lower program", dProgram.getLower().toString()); + String program = dProgram.getUpper().toString(); + try { + gap.compile(program); + gap.addBackTo(); + getDataStore().clearRulesAndMakeFactsExplicit(); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } finally { + gap.clear(); + } + } + else + materialise("upper program", dProgram.getUpper().toString()); + + return 1; + } + + @Override + public AnswerTuples evaluate(String queryText) { + if(isDisposed()) throw new DisposedException(); + return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0]); + } + + @Override + public AnswerTuples evaluate(String queryText, String[] answerVars) { + if(isDisposed()) throw new DisposedException(); + TupleIterator tupleIterator; + try { + tupleIterator = store.compileQuery(queryText.replace("_:", "?"), prefixes, parameters); + } catch(JRDFStoreException e) { + e.printStackTrace(); + return null; + } + return new RDFoxAnswerTuples(answerVars, tupleIterator); + } + + @Override + public DataStore getDataStore() { + if(isDisposed()) throw new DisposedException(); + return store; + } + + @Override + public void dispose() { + super.dispose(); + store.dispose(); + } + + public void outputInstance4BinaryPredicate(String iri, String filename) { + Utility.redirectCurrentOut(filename); + outputInstance4BinaryPredicate(iri); + Utility.closeCurrentOut(); + } + + public void outputInstance4BinaryPredicate(String iri) { + outputAnswers("select ?x ?y where { ?x <" + iri + "> ?y . }"); + } + + public void outputInstanceNumbers(String filename) { + TupleIterator predicateTuples = null; + TupleIterator instanceTuples; + Set number = new HashSet(); + String predicate; + try { + predicateTuples = + getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Y }", prefixes, parameters); + for(long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { + predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); + instanceTuples = null; + try { + instanceTuples = + getDataStore().compileQuery("SELECT ?X WHERE { ?X <" + Namespace.RDF_TYPE + "> " + predicate + " }", prefixes, parameters); + long totalCount = 0; + for(long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) { + totalCount += instanceTuples.getMultiplicity(); + } + number.add(predicate + " * " + totalCount); + } finally { + if(instanceTuples != null) instanceTuples.dispose(); + } + } + } catch(JRDFStoreException e) { + e.printStackTrace(); + } finally { + if(predicateTuples != null) predicateTuples.dispose(); + predicateTuples = null; + } + + try { + predicateTuples = + getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X ?Y ?Z }", prefixes, parameters); + for(long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { + predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); + instanceTuples = null; + try { + instanceTuples = + getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X " + predicate + " ?Z }", prefixes, parameters); + long totalCount = 0; + for(long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) + totalCount += instanceTuples.getMultiplicity(); + number.add(predicate + " * " + totalCount); + } finally { + if(instanceTuples != null) instanceTuples.dispose(); + } + } + + } catch(JRDFStoreException e) { + e.printStackTrace(); + } finally { + if(predicateTuples != null) 
predicateTuples.dispose(); + predicateTuples = null; + } + + Utility.redirectCurrentOut(filename); + String[] ordered = number.toArray(new String[0]); + Arrays.sort(ordered, new DLPredicateComparator()); + for(String line : ordered) System.out.println(line); + Utility.closeCurrentOut(); + + } + + public TupleIterator internal_evaluateAgainstIDBs(String queryText) throws JRDFStoreException { + TupleIterator iter = + store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB); +// iter.open(); + return iter; + } + + public TupleIterator internal_evaluate(String queryText) throws JRDFStoreException { + TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); +// iter.open(); + return iter; + } + + public void setExpandEquality(boolean flag) { + parameters.m_expandEquality = flag; + } + + public TupleIterator internal_evaluateNotExpanded(String queryText) throws JRDFStoreException { + parameters.m_expandEquality = false; + TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); // iter.open(); - return iter; - } - - public TupleIterator internal_evaluate(String queryText) throws JRDFStoreException { - TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); -// iter.open(); - return iter; - } - - public void setExpandEquality(boolean flag) { - parameters.m_expandEquality = flag; - } - - public TupleIterator internal_evaluateNotExpanded(String queryText) throws JRDFStoreException { - parameters.m_expandEquality = false; - TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); -// iter.open(); - parameters.m_expandEquality = true; - return iter; - } - - - public TupleIterator internal_evaluate(String queryText, boolean incrementally) throws JRDFStoreException { - return incrementally ? internal_evaluateAgainstIDBs(queryText) : internal_evaluate(queryText); - } - - Set materialisedRules = new HashSet(); - - public String getUnusedRules(Collection clauses, boolean toUpdate) { - DLClause clause; - for (Iterator iter = clauses.iterator(); iter.hasNext(); ) { - if (materialisedRules.contains(clause = iter.next())) - iter.remove(); - else if (toUpdate) materialisedRules.add(clause); - } - - if (clauses.isEmpty()) return null; - - return Program.toString(clauses); - } - - public void outputMaterialisedRules() { - System.out.println(DLClauseHelper.toString(materialisedRules)); - } - - public void outputAnswers(String query) { - TupleIterator iter = null; - try { - iter = internal_evaluate(query); - System.out.println(query); - int arity = iter.getArity(); - for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { - for (int i = 0; i < arity; ++i) - System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); - System.out.println(); - } - } catch (JRDFStoreException e) { - e.printStackTrace(); - } finally { - if (iter != null) iter.dispose(); - } - } - - public void outputInstance4UnaryPredicate(String iri) { - outputAnswers("select ?x where { ?x " - + " <" - + iri - + "> .}"); - } - - public void outputSubjects(String p, String o) { - outputAnswers("select x where { ?x <" + p + "> <" + o + "> . }"); - } - - public void outputObjects(String s, String p) { - outputAnswers("select ?x where { <" + s + "> <" + p + "> ?x . 
}"); - } - - public void outputIDBFacts() { - TupleIterator iter = null; - try { - iter = internal_evaluateAgainstIDBs("select distict ?x ?y ?z where { ?x ?y ?z }"); - for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { - for (int i = 0; i < 3; ++i) - System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); - System.out.println(); - } - } catch (JRDFStoreException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } finally { - if (iter != null) iter.dispose(); - } - - } - - public void outputType4Individual(String iri) { - outputAnswers("select ?z where { <" + iri + "> " + Namespace.RDF_TYPE_QUOTED + " ?z }"); - } - - public int getSameAsNumber() { - TupleIterator iter = null; - int counter = 0; - try { - iter = internal_evaluate("select ?x ?y where {?x " + Namespace.EQUALITY_QUOTED + " ?y . }"); - for (long multi = iter.open(); multi != 0; multi = iter.getNext()) - if (iter.getResourceID(0) != iter.getResourceID(1)) - ++counter; - } catch (JRDFStoreException e) { - e.printStackTrace(); - } finally { - if (iter != null) iter.dispose(); - } - return counter; - } - - private UFS equalityGroups = null; - - public UFS getEqualityGroups(boolean reuse) { - if (reuse && equalityGroups != null) return equalityGroups; - - equalityGroups = new UFS(); - - TupleIterator answers = null; - try { - Timer t = new Timer(); - answers = internal_evaluate("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + "?z . }"); - for (long multi = answers.open(); multi != 0; multi = answers.getNext()) { - if (answers.getResourceID(0) != answers.getResourceID(1)) - equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm); - } - Utility.logInfo("@Time to group individuals by equality: " + t.duration()); - } catch (JRDFStoreException e) { - e.printStackTrace(); - } finally { - if (answers != null) answers.dispose(); - } - - return equalityGroups; - } - - public void clearRulesAndIDBFacts(Collection collection) { -// performDeletion(collection); - collection.clear(); - try { - store.clearRulesAndMakeFactsExplicit(); - } catch (JRDFStoreException e) { - e.printStackTrace(); - } - } - - @SuppressWarnings("unused") - private void performDeletion(Collection collection) { - Utility.logInfo("Remove all rules, idb facts and added staff..."); - Timer timer = new Timer(); - TupleIterator iter = null; - try { - UpdateType ut = UpdateType.ScheduleForDeletion; - for (int[] t: collection) - store.addTriplesByResourceIDs(t, ut); - - try { - iter = internal_evaluateAgainstIDBs("select ?x ?y ?z where { ?x ?y ?z . }"); - for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { - int[] triple = new int[3]; - for (int i = 0; i < 3; ++i) - triple[i] = iter.getResourceID(i); - store.addTriplesByResourceIDs(triple, ut); - } - } finally { - if (iter != null) iter.dispose(); - iter = null; - } - store.applyReasoning(true); - } catch (JRDFStoreException e) { - e.printStackTrace(); - } - Utility.logInfo("Time for deletion: " + timer.duration()); - } + parameters.m_expandEquality = true; + return iter; + } + + public TupleIterator internal_evaluate(String queryText, boolean incrementally) throws JRDFStoreException { + return incrementally ? 
internal_evaluateAgainstIDBs(queryText) : internal_evaluate(queryText); + } + + public String getUnusedRules(Collection clauses, boolean toUpdate) { + DLClause clause; + for(Iterator iter = clauses.iterator(); iter.hasNext(); ) { + if(materialisedRules.contains(clause = iter.next())) + iter.remove(); + else if(toUpdate) materialisedRules.add(clause); + } + + if(clauses.isEmpty()) return null; + + return Program.toString(clauses); + } + + public void outputMaterialisedRules() { + System.out.println(DLClauseHelper.toString(materialisedRules)); + } + + public void outputAnswers(String query) { + TupleIterator iter = null; + try { + iter = internal_evaluate(query); + System.out.println(query); + int arity = iter.getArity(); + for(long multi = iter.open(); multi != 0; multi = iter.getNext()) { + for(int i = 0; i < arity; ++i) + System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); + System.out.println(); + } + } catch(JRDFStoreException e) { + e.printStackTrace(); + } finally { + if(iter != null) iter.dispose(); + } + } + + public void outputInstance4UnaryPredicate(String iri) { + outputAnswers("select ?x where { ?x " + + Namespace.RDF_TYPE_QUOTED + + " <" + + iri + + "> .}"); + } + + public void outputSubjects(String p, String o) { + outputAnswers("select ?x where { ?x <" + p + "> <" + o + "> . }"); + } + + public void outputObjects(String s, String p) { + outputAnswers("select ?x where { <" + s + "> <" + p + "> ?x . }"); + } + + public void outputIDBFacts() { + TupleIterator iter = null; + try { + iter = internal_evaluateAgainstIDBs("select distinct ?x ?y ?z where { ?x ?y ?z }"); + for(long multi = iter.open(); multi != 0; multi = iter.getNext()) { + for(int i = 0; i < 3; ++i) + System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); + System.out.println(); + } + } catch(JRDFStoreException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } finally { + if(iter != null) iter.dispose(); + } + + } + + public void outputType4Individual(String iri) { + outputAnswers("select ?z where { <" + iri + "> " + Namespace.RDF_TYPE_QUOTED + " ?z }"); + } + + public int getSameAsNumber() { + TupleIterator iter = null; + int counter = 0; + try { + iter = internal_evaluate("select ?x ?y where {?x " + Namespace.EQUALITY_QUOTED + " ?y . }"); + for(long multi = iter.open(); multi != 0; multi = iter.getNext()) + if(iter.getResourceID(0) != iter.getResourceID(1)) + ++counter; + } catch(JRDFStoreException e) { + e.printStackTrace(); + } finally { + if(iter != null) iter.dispose(); + } + return counter; + } + + public UFS getEqualityGroups(boolean reuse) { + if(reuse && equalityGroups != null) return equalityGroups; + + equalityGroups = new UFS(); + + TupleIterator answers = null; + try { + Timer t = new Timer(); + answers = internal_evaluate("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + "?z . 
}"); + for(long multi = answers.open(); multi != 0; multi = answers.getNext()) { + if(answers.getResourceID(0) != answers.getResourceID(1)) + equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm); + } + Utility.logInfo("@Time to group individuals by equality: " + t.duration()); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } finally { + if(answers != null) answers.dispose(); + } + + return equalityGroups; + } + + public void clearRulesAndIDBFacts(Collection collection) { +// performDeletion(collection); + collection.clear(); + try { + store.clearRulesAndMakeFactsExplicit(); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } + } + + protected void outputClassAssertions(String filename) { + TupleIterator allTuples = null; + boolean redirect = false; + try { + allTuples = + getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Z }", prefixes, parameters); + redirect = Utility.redirectCurrentOut(filename); + for(long multi = allTuples.open(); multi != 0; multi = allTuples.getNext()) + System.out.println(RDFoxTripleManager.getQuotedTerm(allTuples.getResource(0)) + " " + RDFoxTripleManager + .getQuotedTerm(allTuples.getResource(1))); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } finally { + if(redirect) Utility.closeCurrentOut(); + if(allTuples != null) allTuples.dispose(); + } + } + + @SuppressWarnings("unused") + private void performDeletion(Collection collection) { + Utility.logInfo("Remove all rules, idb facts and added staff..."); + Timer timer = new Timer(); + TupleIterator iter = null; + try { + UpdateType ut = UpdateType.ScheduleForDeletion; + for(int[] t : collection) + store.addTriplesByResourceIDs(t, ut); + + try { + iter = internal_evaluateAgainstIDBs("select ?x ?y ?z where { ?x ?y ?z . 
}"); + for(long multi = iter.open(); multi != 0; multi = iter.getNext()) { + int[] triple = new int[3]; + for(int i = 0; i < 3; ++i) + triple[i] = iter.getResourceID(i); + store.addTriplesByResourceIDs(triple, ut); + } + } finally { + if(iter != null) iter.dispose(); + iter = null; + } + store.applyReasoning(true); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } + Utility.logInfo("Time for deletion: " + timer.duration()); + } } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java index f068164..98f0c35 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java @@ -12,6 +12,7 @@ import uk.ac.ox.cs.pagoda.query.AnswerTuples; import uk.ac.ox.cs.pagoda.query.AnswerTuplesImp; import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; import java.io.File; import java.io.FileNotFoundException; @@ -19,82 +20,90 @@ import java.nio.file.Paths; import java.util.Set; public class KarmaQueryEngine extends RDFoxQueryEngine { - - private MyKarma reasoner = null; - - String karmaDataFile = null, karmaRuleFile = null; - - public KarmaQueryEngine(String name) { - super(name); - + + String karmaDataFile = null, karmaRuleFile = null; + private MyKarma reasoner = null; + + public KarmaQueryEngine(String name) { + super(name); + // int Base = 1 << 6; // int index = (new Random().nextInt() % Base + Base) % Base; // karmaDataFile = "karma_data" + index + ".ttl"; // karmaRuleFile = "karma_rule" + index + ".dlog"; - karmaDataFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_data.ttl").toString(); - karmaRuleFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_rule.dlog").toString(); - - reasoner = new MyKarma(); - } - - public MyKarma getReasoner() { - return reasoner; - } - - public void processOntology(OWLOntology elhoOntology) { - try { - OntologyProcesser.transformOntology(elhoOntology, new File(karmaDataFile), new File(karmaRuleFile)); - } catch (IllegalInputOntologyException e) { - e.printStackTrace(); - } - } - - @Override - public void dispose() { - reasoner.dispose(); - } - - @Override - public AnswerTuples evaluate(String queryText) { - return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], null); - } - - @Override - public AnswerTuples evaluate(String queryText, String[] answerVars) { - return evaluate(queryText, answerVars, null); - } - - public AnswerTuples evaluate(String queryText, AnswerTuples soundAnswerTuples) { - return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], soundAnswerTuples); - } - - public AnswerTuples evaluate(String queryText, String[] answerVars, AnswerTuples soundAnswerTuples) { - KarmaQuery karmaQuery = new KarmaQuery(queryText.replace("_:", "?")); - reasoner.setConcurrence(false); - ConjunctiveQuery cq = karmaQuery.getConjunctiveQuery(); - if (cq == null) return null; - Set answers = reasoner.answerCQ(cq, soundAnswerTuples, !queryText.contains("_:")); - return new AnswerTuplesImp(answerVars, answers); - } - - @Override - public DataStore getDataStore() { - return reasoner.getStore(); - } - - public void initialiseKarma() { - try { - reasoner.initializeData(new File(karmaDataFile)); - reasoner.materialise(new File(karmaRuleFile)); - - File tmp; - if (karmaDataFile != null && ((tmp = new File(karmaDataFile)).exists())) 
tmp.delete(); - if (karmaRuleFile != null && ((tmp = new File(karmaRuleFile)).exists())) tmp.delete(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } catch (JRDFStoreException e) { - e.printStackTrace(); - } - } + karmaDataFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_data.ttl").toString(); + karmaRuleFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_rule.dlog").toString(); + + reasoner = new MyKarma(); + } + + public MyKarma getReasoner() { + if(isDisposed()) throw new DisposedException(); + return reasoner; + } + + public void processOntology(OWLOntology elhoOntology) { + if(isDisposed()) throw new DisposedException(); + try { + OntologyProcesser.transformOntology(elhoOntology, new File(karmaDataFile), new File(karmaRuleFile)); + } catch(IllegalInputOntologyException e) { + e.printStackTrace(); + } + } + + @Override + public void dispose() { + super.dispose(); + reasoner.dispose(); + } + + @Override + public AnswerTuples evaluate(String queryText) { + if(isDisposed()) throw new DisposedException(); + return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], null); + } + + @Override + public AnswerTuples evaluate(String queryText, String[] answerVars) { + if(isDisposed()) throw new DisposedException(); + return evaluate(queryText, answerVars, null); + } + + public AnswerTuples evaluate(String queryText, AnswerTuples soundAnswerTuples) { + if(isDisposed()) throw new DisposedException(); + return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], soundAnswerTuples); + } + + public AnswerTuples evaluate(String queryText, String[] answerVars, AnswerTuples soundAnswerTuples) { + if(isDisposed()) throw new DisposedException(); + KarmaQuery karmaQuery = new KarmaQuery(queryText.replace("_:", "?")); + reasoner.setConcurrence(false); + ConjunctiveQuery cq = karmaQuery.getConjunctiveQuery(); + if(cq == null) return null; + Set answers = reasoner.answerCQ(cq, soundAnswerTuples, !queryText.contains("_:")); + return new AnswerTuplesImp(answerVars, answers); + } + + @Override + public DataStore getDataStore() { + if(isDisposed()) throw new DisposedException(); + return reasoner.getStore(); + } + + public void initialiseKarma() { + if(isDisposed()) throw new DisposedException(); + try { + reasoner.initializeData(new File(karmaDataFile)); + reasoner.materialise(new File(karmaRuleFile)); + + File tmp; + if(karmaDataFile != null && ((tmp = new File(karmaDataFile)).exists())) tmp.delete(); + if(karmaRuleFile != null && ((tmp = new File(karmaRuleFile)).exists())) tmp.delete(); + } catch(FileNotFoundException e) { + e.printStackTrace(); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } + } } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java index dd71809..1e8181f 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java @@ -3,15 +3,14 @@ package uk.ac.ox.cs.pagoda.reasoner.light; import org.semanticweb.HermiT.model.Constant; import org.semanticweb.HermiT.model.Individual; import org.semanticweb.HermiT.model.Term; - -import uk.ac.ox.cs.pagoda.query.AnswerTuple; -import uk.ac.ox.cs.pagoda.query.AnswerTuples; -import uk.ac.ox.cs.pagoda.util.Utility; import uk.ac.ox.cs.JRDFox.JRDFStoreException; import uk.ac.ox.cs.JRDFox.model.GroundTerm; import uk.ac.ox.cs.JRDFox.store.TupleIterator; +import uk.ac.ox.cs.pagoda.query.AnswerTuple; 
+import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.util.Utility; -public class RDFoxAnswerTuples implements AnswerTuples { +public class RDFoxAnswerTuples extends AnswerTuples { long multi; TupleIterator m_iter; @@ -22,7 +21,18 @@ public class RDFoxAnswerTuples implements AnswerTuples { m_iter = iter; reset(); } - + + public static Term getHermitTerm(GroundTerm t) { + if(t instanceof uk.ac.ox.cs.JRDFox.model.Individual) { + uk.ac.ox.cs.JRDFox.model.Individual individual = (uk.ac.ox.cs.JRDFox.model.Individual) t; + return Individual.create(individual.getIRI()); + } + else { + uk.ac.ox.cs.JRDFox.model.Literal literal = ((uk.ac.ox.cs.JRDFox.model.Literal) t); + return Constant.create(literal.getLexicalForm(), literal.getDatatype().getIRI()); + } + } + @Override public boolean isValid() { return multi != 0; @@ -34,7 +44,7 @@ public class RDFoxAnswerTuples implements AnswerTuples { return m_iter.getArity(); } catch (JRDFStoreException e) { e.printStackTrace(); - return -1; + return -1; } } @@ -44,15 +54,12 @@ public class RDFoxAnswerTuples implements AnswerTuples { multi = m_iter.getNext(); } catch (JRDFStoreException e) { e.printStackTrace(); - } + } } @Override public void dispose() { - m_iter.dispose(); - } - - protected void finalize() { + super.dispose(); m_iter.dispose(); } @@ -85,16 +92,9 @@ public class RDFoxAnswerTuples implements AnswerTuples { public String[] getAnswerVariables() { return m_answerVars; } - - public static Term getHermitTerm(GroundTerm t) { - if (t instanceof uk.ac.ox.cs.JRDFox.model.Individual) { - uk.ac.ox.cs.JRDFox.model.Individual individual = (uk.ac.ox.cs.JRDFox.model.Individual) t; - return Individual.create(individual.getIRI()); - } - else { - uk.ac.ox.cs.JRDFox.model.Literal literal = ((uk.ac.ox.cs.JRDFox.model.Literal) t); - return Constant.create(literal.getLexicalForm(), literal.getDatatype().getIRI()); - } + + protected void finalize() { + m_iter.dispose(); } } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java index 61500f5..f835ba9 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java @@ -13,119 +13,126 @@ import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; import java.io.File; import java.util.Collection; -public abstract class RDFoxQueryEngine implements QueryEngine { - - public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2; - protected String name; - protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes(); +public abstract class RDFoxQueryEngine extends QueryEngine { - public RDFoxQueryEngine(String name) { - this.name = name; - } + public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2; + protected String name; + protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes(); - public static DataStore createDataStore() { - DataStore instance = null; - try { + public RDFoxQueryEngine(String name) { + this.name = name; + } + + public static DataStore createDataStore() { + DataStore instance = null; + try { // instance = new DataStore("par-head-n"); - instance = new DataStore(StoreType.NarrowParallelHead); - instance.setNumberOfThreads(matNoOfThreads); - instance.initialize(); - 
} catch(JRDFStoreException e) { - e.printStackTrace(); - } - return instance; - } - - public String getName() { - return name; - } - - public abstract DataStore getDataStore(); - - public abstract void dispose(); - - public void importRDFData(String fileName, String importedFile) { - if(importedFile == null || importedFile.isEmpty()) return; - Timer t = new Timer(); - DataStore store = getDataStore(); - try { - long oldTripleCount = store.getTriplesCount(), tripleCount; - for (String file: importedFile.split(QueryReasoner.ImportDataFileSeparator)) { - store.importTurtleFile(new File(file), prefixes); - } - tripleCount = store.getTriplesCount(); - Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); - store.clearRulesAndMakeFactsExplicit(); - } catch (JRDFStoreException e) { - e.printStackTrace(); - } - Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds."); - } - - public void importDataFromABoxOf(OWLOntology ontology) { - DataStore store = getDataStore(); - try { - long prevTriplesCount = store.getTriplesCount(); - store.importOntology(ontology.getOWLOntologyManager().createOntology(ontology.getABoxAxioms(true))); - long loadedTriples = store.getTriplesCount() - prevTriplesCount; - Utility.logInfo(name + ": loaded " + loadedTriples + " triples from " + ontology.getABoxAxioms(true) - .size() + " ABox axioms"); - } catch(JRDFStoreException | OWLOntologyCreationException e) { - e.printStackTrace(); - System.exit(1); - } - - } - - public void materialise(String programName, String programText) { - if(programText == null) return; - Timer t = new Timer(); - DataStore store = getDataStore(); - try { - long oldTripleCount = store.getTriplesCount(), tripleCount; + instance = new DataStore(StoreType.NarrowParallelHead); + instance.setNumberOfThreads(matNoOfThreads); + instance.initialize(); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } + return instance; + } + + public String getName() { + if(isDisposed()) throw new DisposedException(); + return name; + } + + public abstract DataStore getDataStore(); + + public void importRDFData(String fileName, String importedFile) { + if(isDisposed()) throw new DisposedException(); + if(importedFile == null || importedFile.isEmpty()) return; + Timer t = new Timer(); + DataStore store = getDataStore(); + try { + long oldTripleCount = store.getTriplesCount(), tripleCount; + for(String file : importedFile.split(QueryReasoner.ImportDataFileSeparator)) { + store.importTurtleFile(new File(file), prefixes); + } + tripleCount = store.getTriplesCount(); + Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); + store.clearRulesAndMakeFactsExplicit(); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } + Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds."); + } + + public void importDataFromABoxOf(OWLOntology ontology) { + if(isDisposed()) throw new DisposedException(); + DataStore store = getDataStore(); + try { + long prevTriplesCount = store.getTriplesCount(); + store.importOntology(ontology.getOWLOntologyManager().createOntology(ontology.getABoxAxioms(true))); + long loadedTriples = store.getTriplesCount() - prevTriplesCount; + Utility.logInfo(name + ": loaded " + loadedTriples + " triples from " + ontology.getABoxAxioms(true) + .size() + " ABox axioms"); + } catch(JRDFStoreException | 
OWLOntologyCreationException e) { + e.printStackTrace(); + System.exit(1); + } + + } + + public void materialise(String programName, String programText) { + if(isDisposed()) throw new DisposedException(); + if(programText == null) return; + Timer t = new Timer(); + DataStore store = getDataStore(); + try { + long oldTripleCount = store.getTriplesCount(), tripleCount; // store.addRules(new String[] {programText}); - store.importRules(programText); - store.applyReasoning(); - tripleCount = store.getTriplesCount(); - Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); - store.clearRulesAndMakeFactsExplicit(); - } catch (JRDFStoreException e) { - e.printStackTrace(); - } - Utility.logDebug(name + " store finished the materialisation of " + programName + " in " + t.duration() + " seconds."); - } - - @Override - public void evaluate(Collection queryTexts, String answerFile) { - if (queryTexts == null) - return ; - - int queryID = 0; - AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile); - AnswerTuples answerTuples; - Timer t = new Timer(); - try { - for (String query: queryTexts) { - t.reset(); - answerTuples = null; - try { - answerTuples = evaluate(query); - Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration()); - answerWriter.write(answerTuples.getAnswerVariables(), answerTuples); - } finally { - if (answerTuples != null) answerTuples.dispose(); - } - } - } finally { - answerWriter.close(); - } - - Utility.logDebug("done computing query answers by RDFox."); - - } + store.importRules(programText); + store.applyReasoning(); + tripleCount = store.getTriplesCount(); + Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); + store.clearRulesAndMakeFactsExplicit(); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } + Utility.logDebug(name + " store finished the materialisation of " + programName + " in " + t.duration() + " seconds."); + } + + @Override + public void evaluate(Collection queryTexts, String answerFile) { + if(isDisposed()) throw new DisposedException(); + if(queryTexts == null) + return; + + int queryID = 0; + AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile); + AnswerTuples answerTuples; + Timer t = new Timer(); + try { + for(String query : queryTexts) { + t.reset(); + answerTuples = null; + try { + answerTuples = evaluate(query); + Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration()); + answerWriter.write(answerTuples.getAnswerVariables(), answerTuples); + } finally { + if(answerTuples != null) answerTuples.dispose(); + } + } + } finally { + answerWriter.close(); + } + + Utility.logDebug("done computing query answers by RDFox."); + } + @Override + public void dispose() { + super.dispose(); + } } -- cgit v1.2.3
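Reviewer sketches follow; none of them is part of the commit. The change relies on uk.ac.ox.cs.pagoda.util.disposable.Disposable and DisposedException, whose sources fall outside the files touched here. A minimal pair consistent with the usage above (subclasses override dispose(), delegate to super.dispose() exactly once, and guard their public methods with isDisposed()) could look as follows; the classes actually shipped in the repository may differ:

    // Disposable.java (hypothetical reconstruction)
    package uk.ac.ox.cs.pagoda.util.disposable;

    public abstract class Disposable {

        private boolean disposed = false;

        /** Marks the object as disposed; in this fail-fast variant a second call throws. */
        public void dispose() {
            if(disposed) throw new DisposedException();
            disposed = true;
        }

        /** True once dispose() has been called. */
        public boolean isDisposed() {
            return disposed;
        }
    }

    // DisposedException.java (hypothetical reconstruction)
    package uk.ac.ox.cs.pagoda.util.disposable;

    public class DisposedException extends RuntimeException {

        public DisposedException() {
            super("The object has already been disposed.");
        }
    }

Since most dispose() overrides in the patch now call super.dispose() first, a double disposal under this sketch surfaces immediately, before any store is released twice; an idempotent no-op dispose() would be the other defensible reading of the same call sites.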
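The queryBounds/queryUpperStore flow in MyQueryReasoner encodes PAgOdA's bracketing scheme: a sound lower bound and a complete upper bound are computed by the cheap datalog stores, and the expensive reasoner is consulted only for the gap between them. A self-contained illustration of the convergence test, with plain sets standing in for AnswerTuples and QueryRecord (all names below are illustrative):

    import java.util.HashSet;
    import java.util.Set;

    final class BoundsConvergenceSketch {

        /**
         * Returns the certain answers when the bounds coincide, or null when the
         * remaining gap must still be checked by a fully-fledged reasoner.
         */
        static Set<String> settleByBounds(Set<String> lowerBound, Set<String> upperBound) {
            // Soundness: every lower-bound answer is certain.
            // Completeness: no certain answer lies outside the upper bound.
            Set<String> gap = new HashSet<>(upperBound);
            gap.removeAll(lowerBound);
            return gap.isEmpty() ? lowerBound : null; // empty gap: the query is settled
        }

        public static void main(String[] args) {
            Set<String> lower = new HashSet<>(Set.of("a", "b"));
            Set<String> upper = new HashSet<>(Set.of("a", "b", "c"));
            System.out.println(settleByBounds(lower, upper)); // null: "c" must still be checked
        }
    }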
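Finally, the evaluate/dispose pairing that recurs throughout the patch (and that RDFoxAnswerTuples.finalize() merely backstops; finalizers run at unpredictable times and are no substitute for explicit disposal) reduces to a caller-side try/finally over the public API declared above. The reasoner and query text are assumed to be supplied elsewhere:

    import uk.ac.ox.cs.pagoda.query.AnswerTuples;
    import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner;

    final class DisposalPatternSketch {

        /** Prints all answers, releasing the underlying RDFox iterator whatever happens. */
        static void printAnswers(QueryReasoner reasoner, String queryText) {
            AnswerTuples tuples = null;
            try {
                tuples = reasoner.evaluate(queryText);
                for(; tuples.isValid(); tuples.moveNext())
                    System.out.println(tuples.getTuple());
            } finally {
                if(tuples != null) tuples.dispose(); // frees the TupleIterator behind the answers
            }
        }
    }

After reasoner.dispose(), any further call such as evaluate or getQueryManager throws DisposedException by construction, which is exactly the contract the isDisposed() guards added in this patch enforce.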