From b3b822d187a6402a39d30e471fe90a5dfad64312 Mon Sep 17 00:00:00 2001 From: RncLsn Date: Wed, 3 Jun 2015 15:21:30 +0100 Subject: Before reintroducing extended queries. --- src/uk/ac/ox/cs/pagoda/endomorph/Endomorph.java | 29 +- .../LimitedSkolemisationApplication.java | 8 +- .../pagoda/multistage/MultiStageQueryEngine.java | 34 ++- src/uk/ac/ox/cs/pagoda/query/QueryRecord.java | 2 +- .../ac/ox/cs/pagoda/query/rollup/QueryGraph.java | 222 ++++++-------- .../ox/cs/pagoda/reasoner/ConsistencyManager.java | 13 + .../ox/cs/pagoda/reasoner/ELHOQueryReasoner.java | 2 +- .../ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java | 2 +- .../ac/ox/cs/pagoda/reasoner/HermiTReasoner.java | 4 +- .../ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java | 93 ++++-- src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java | 14 +- .../ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java | 2 +- .../ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java | 2 +- src/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java | 12 +- .../ox/cs/pagoda/reasoner/full/HermitChecker.java | 330 +++++++++++---------- .../cs/pagoda/reasoner/light/BasicQueryEngine.java | 38 +++ .../LimitedSkolemisationApproximator.java | 22 +- .../ox/cs/pagoda/summary/HermitSummaryFilter.java | 59 ++-- src/uk/ac/ox/cs/pagoda/util/PagodaProperties.java | 4 + src/uk/ac/ox/cs/pagoda/util/Utility.java | 6 +- 20 files changed, 504 insertions(+), 394 deletions(-) (limited to 'src/uk') diff --git a/src/uk/ac/ox/cs/pagoda/endomorph/Endomorph.java b/src/uk/ac/ox/cs/pagoda/endomorph/Endomorph.java index e6b50f9..cde0895 100644 --- a/src/uk/ac/ox/cs/pagoda/endomorph/Endomorph.java +++ b/src/uk/ac/ox/cs/pagoda/endomorph/Endomorph.java @@ -1,11 +1,8 @@ package uk.ac.ox.cs.pagoda.endomorph; -import java.util.Collection; -import java.util.LinkedList; - import org.semanticweb.owlapi.model.OWLOntology; - -import uk.ac.ox.cs.pagoda.endomorph.plan.*; +import uk.ac.ox.cs.pagoda.endomorph.plan.CheckPlan; +import uk.ac.ox.cs.pagoda.endomorph.plan.OpenEndPlan; import uk.ac.ox.cs.pagoda.query.AnswerTuple; import uk.ac.ox.cs.pagoda.query.AnswerTuples; import uk.ac.ox.cs.pagoda.query.QueryRecord; @@ -14,8 +11,12 @@ import uk.ac.ox.cs.pagoda.summary.Graph; import uk.ac.ox.cs.pagoda.summary.NodeTuple; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; -public class Endomorph implements Checker { +import java.util.Collection; +import java.util.LinkedList; + +public class Endomorph extends Checker { Checker fullReasoner; DependencyGraph dGraph; @@ -36,6 +37,8 @@ public class Endomorph implements Checker { @Override public int check(AnswerTuples answerTuples) { + if(isDisposed()) throw new DisposedException(); + Collection nodes = new LinkedList(); int counter = 0; @@ -62,33 +65,47 @@ public class Endomorph implements Checker { } public OWLOntology getOntology() { + if(isDisposed()) throw new DisposedException(); + return m_record.getRelevantOntology(); } @Override public boolean check(AnswerTuple answerTuple) { + if(isDisposed()) throw new DisposedException(); + return fullReasoner.check(answerTuple); } @Override public boolean isConsistent() { + if(isDisposed()) throw new DisposedException(); + return fullReasoner.isConsistent(); } @Override public void dispose() { + super.dispose(); + fullReasoner.dispose(); } public Graph getGraph() { + if(isDisposed()) throw new DisposedException(); + return graph; } public Checker getChecker() { + if(isDisposed()) throw new DisposedException(); + return fullReasoner; } public 
DependencyGraph getDependencyGraph() { + if(isDisposed()) throw new DisposedException(); + return dGraph; } diff --git a/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java b/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java index b548d39..efcf9ba 100644 --- a/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java +++ b/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java @@ -9,8 +9,12 @@ public class LimitedSkolemisationApplication extends RestrictedApplication { public static final int MAX_DEPTH = 1; - public LimitedSkolemisationApplication(Program program, BottomStrategy upperBottom) { + public LimitedSkolemisationApplication(Program program, BottomStrategy upperBottom, int maxDepth) { super(program, upperBottom); - m_approxExist = new LimitedSkolemisationApproximator(MAX_DEPTH); + m_approxExist = new LimitedSkolemisationApproximator(maxDepth); + } + + public LimitedSkolemisationApplication(Program program, BottomStrategy upperBottom) { + this(program, upperBottom, MAX_DEPTH); } } diff --git a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java index e1be6d2..0987279 100644 --- a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java @@ -49,22 +49,28 @@ public class MultiStageQueryEngine extends StageQueryEngine { RestrictedApplication program = new RestrictedApplication(generalProgram, dProgram.getUpperBottomStrategy()); Treatment treatment = new Pick4NegativeConceptNaive(this, program); int ret = materialise(program, treatment, gap); - treatment.dispose(); // does nothing + treatment.dispose(); // FIXME does nothing return ret; } - public int materialise4SpecificQuery(Program generalProgram, QueryRecord record, BottomStrategy upperBottom) { + /** + * delta-chase + */ + public int materialiseSkolemly(DatalogProgram dProgram, GapByStore4ID gap, int maxTermDepth) { if(isDisposed()) throw new DisposedException(); - RestrictedApplication program = new RestrictedApplication(generalProgram, upperBottom); - Treatment treatment = new Pick4NegativeConceptQuerySpecific(this, program, record); - int ret = materialise(program, treatment, null); - treatment.dispose(); - return ret; + materialise("lower program", dProgram.getLower().toString()); + Program generalProgram = dProgram.getGeneral(); + LimitedSkolemisationApplication program = + new LimitedSkolemisationApplication(generalProgram, + dProgram.getUpperBottomStrategy(), + maxTermDepth); + Treatment treatment = new Pick4NegativeConceptNaive(this, program); + return materialise(program, treatment, gap); } /** - * delta-chase + * delta-chase with fixed mad term depth */ @Override public int materialiseSkolemly(DatalogProgram dProgram, GapByStore4ID gap) { @@ -78,6 +84,16 @@ public class MultiStageQueryEngine extends StageQueryEngine { return materialise(program, treatment, gap); } + public int materialise4SpecificQuery(Program generalProgram, QueryRecord record, BottomStrategy upperBottom) { + if(isDisposed()) throw new DisposedException(); + + RestrictedApplication program = new RestrictedApplication(generalProgram, upperBottom); + Treatment treatment = new Pick4NegativeConceptQuerySpecific(this, program, record); + int ret = materialise(program, treatment, null); + treatment.dispose(); // FIXME does nothing + return ret; + } + private int materialise(MultiStageUpperProgram program, Treatment treatment, GapByStore4ID gap) { 
if(gap != null) treatment.addAdditionalGapTuples(); @@ -128,7 +144,7 @@ public class MultiStageQueryEngine extends StageQueryEngine { if(!isValid()) { if(iteration == 1) { - Utility.logInfo("The ontology is inconsistent."); + Utility.logDebug("The ontology is inconsistent."); return -1; } Utility.logInfo(name + " store FAILED for multi-stage materialisation in " + t.duration() + " seconds."); diff --git a/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java b/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java index 516a461..3edb2c3 100644 --- a/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java +++ b/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java @@ -613,7 +613,7 @@ public class QueryRecord extends Disposable { } // } - TupleBuilder result = new TupleBuilder(); + TupleBuilder result = new TupleBuilder<>(); result.append(extendedSelect + " " + fullyExtendedWhere); extra.setLength(0); diff --git a/src/uk/ac/ox/cs/pagoda/query/rollup/QueryGraph.java b/src/uk/ac/ox/cs/pagoda/query/rollup/QueryGraph.java index 58d7add..a567699 100644 --- a/src/uk/ac/ox/cs/pagoda/query/rollup/QueryGraph.java +++ b/src/uk/ac/ox/cs/pagoda/query/rollup/QueryGraph.java @@ -1,49 +1,11 @@ package uk.ac.ox.cs.pagoda.query.rollup; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; - -import org.semanticweb.HermiT.model.Atom; -import org.semanticweb.HermiT.model.AtomicConcept; -import org.semanticweb.HermiT.model.AtomicRole; -import org.semanticweb.HermiT.model.Constant; -import org.semanticweb.HermiT.model.Individual; -import org.semanticweb.HermiT.model.Term; -import org.semanticweb.HermiT.model.Variable; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLClassExpressionVisitorEx; -import org.semanticweb.owlapi.model.OWLDataAllValuesFrom; -import org.semanticweb.owlapi.model.OWLDataExactCardinality; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLDataHasValue; -import org.semanticweb.owlapi.model.OWLDataMaxCardinality; -import org.semanticweb.owlapi.model.OWLDataMinCardinality; -import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom; -import org.semanticweb.owlapi.model.OWLIndividual; -import org.semanticweb.owlapi.model.OWLLiteral; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectAllValuesFrom; -import org.semanticweb.owlapi.model.OWLObjectComplementOf; -import org.semanticweb.owlapi.model.OWLObjectExactCardinality; -import org.semanticweb.owlapi.model.OWLObjectHasSelf; -import org.semanticweb.owlapi.model.OWLObjectHasValue; -import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; -import org.semanticweb.owlapi.model.OWLObjectMaxCardinality; -import org.semanticweb.owlapi.model.OWLObjectMinCardinality; -import org.semanticweb.owlapi.model.OWLObjectOneOf; -import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; -import org.semanticweb.owlapi.model.OWLObjectUnionOf; -import org.semanticweb.owlapi.model.OWLOntology; - +import org.semanticweb.HermiT.model.*; +import org.semanticweb.owlapi.model.*; import uk.ac.ox.cs.pagoda.util.Namespace; +import java.util.*; + public class QueryGraph { Set freeVars = new HashSet(); @@ -82,168 +44,170 @@ public class QueryGraph { rollup(); } - - private void 
updateExistentiallyVariables(Variable argumentVariable) { - if (freeVars.contains(argumentVariable)) return ; - existVars.add(argumentVariable); - } public void createEdges(Term u, AtomicRole r, Term v) { - if (ontology.containsDataPropertyInSignature(IRI.create(r.getIRI()))) { + if(ontology.containsDataPropertyInSignature(IRI.create(r.getIRI()))) { // edges.add(u, new DataEdge(r, v)); - Constant c = (Constant) v; - OWLLiteral l = factory.getOWLLiteral(c.getLexicalForm(), c.getDatatypeURI()); + Constant c = (Constant) v; + OWLLiteral l = factory.getOWLLiteral(c.getLexicalForm(), c.getDatatypeURI()); concepts.add(u, factory.getOWLDataHasValue(factory.getOWLDataProperty(IRI.create(r.getIRI())), l)); } else { boolean rollable = existVars.contains(u) || existVars.contains(v); - + ObjectEdge edge = new ObjectEdge(r, v, false); - if (rollable) { + if(rollable) { rollable_edges.add(u, edge); edge = new ObjectEdge(r, u, true); rollable_edges.add(v, edge); } else edges.add(u, edge); - + } } + + public Set getPropertyAssertions(Map assignment) { + OWLIndividual sub, obj; + Set axioms = new HashSet(); + for(Map.Entry> entry : edges.map.entrySet()) { + sub = factory.getOWLNamedIndividual(IRI.create(getIndividual(entry.getKey(), assignment).getIRI())); + for(ObjectEdge edge : entry.getValue()) { + Individual individual = getIndividual(edge.v, assignment); + String iri = individual.getIRI(); + obj = factory.getOWLNamedIndividual(IRI.create(iri)); + axioms.add(factory.getOWLObjectPropertyAssertionAxiom(edge.p, sub, obj)); + } + } + return axioms; + } + + public Set getAssertions(Map assignment) { + if(!rollable_edges.isEmpty()) return null; + + OWLIndividual sub; + Visitor visitor = new Visitor(factory, assignment); + Set axioms = getPropertyAssertions(assignment); + for(Map.Entry> entry : concepts.map.entrySet()) { + if(existVars.contains(entry.getKey())) continue; + sub = factory.getOWLNamedIndividual(IRI.create(getIndividual(entry.getKey(), assignment).getIRI())); + for(OWLClassExpression clsExp : entry.getValue()) { + axioms.add(factory.getOWLClassAssertionAxiom(clsExp.accept(visitor), sub)); + } + } + return axioms; + } + + private void updateExistentiallyVariables(Variable argumentVariable) { + if(freeVars.contains(argumentVariable)) return; + existVars.add(argumentVariable); + } private void rollup() { for (boolean updated = true; updated; ) { - updated = false; - - Set set; + updated = false; + + Set set; for (Variable var: existVars) { if ((set = rollable_edges.map.get(var)) != null && set.size() == 1) { - updated = true; + updated = true; ObjectEdge edge = set.iterator().next(); rollupEdge(edge.v, edge.p.getInverseProperty().getSimplified(), var, true); set.clear(); } } - if (updated) continue; - + if(updated) continue; + for (Variable var: existVars) { - set = rollable_edges.map.get(var); - if (set == null) continue; + set = rollable_edges.map.get(var); + if(set == null) continue; for (Iterator iter = set.iterator(); iter.hasNext(); ) { - ObjectEdge edge = iter.next(); + ObjectEdge edge = iter.next(); if (constants.contains(edge.v) || freeVars.contains(edge.v)) { - updated = true; + updated = true; rollupEdge(var, edge.p, edge.v, false); iter.remove(); } } } } - + } private void rollupEdge(Term u, OWLObjectPropertyExpression op, Term v, boolean inverse) { if (existVars.contains(v)) { - concepts.add(u, factory.getOWLObjectSomeValuesFrom(op, factory.getOWLObjectIntersectionOf(concepts.get(v)))); + concepts.add(u, factory.getOWLObjectSomeValuesFrom(op, 
factory.getOWLObjectIntersectionOf(concepts.get(v)))); } else { - OWLIndividual obj = getOWLIndividual(v); + OWLIndividual obj = getOWLIndividual(v); concepts.add(u, factory.getOWLObjectHasValue(op, obj)); } - - if (inverse) + + if(inverse) removeRollableEdge(u, op, v); - else + else removeRollableEdge(v, op.getInverseProperty().getSimplified(), u); } - + private void removeRollableEdge(Term u, OWLObjectPropertyExpression op, Term v) { Set set = rollable_edges.get(u); - ObjectEdge edge; + ObjectEdge edge; if (set != null) for (Iterator iter = set.iterator(); iter.hasNext(); ) { - edge = iter.next(); - if (edge.p.equals(op) && edge.v.equals(v)) iter.remove(); + edge = iter.next(); + if(edge.p.equals(op) && edge.v.equals(v)) iter.remove(); } } - + OWLNamedIndividual getOWLIndividual(Term t) { if (freeVars.contains(t)) return new VariableIndividual((Variable) t); else if (t instanceof Variable) - return null; - else - return factory.getOWLNamedIndividual(IRI.create(((Individual) t).getIRI())); + return null; + else + return factory.getOWLNamedIndividual(IRI.create(((Individual) t).getIRI())); + } + + private Individual getIndividual(Term key, Map assignment) { + if(key instanceof Individual) + return (Individual) key; + else + return (Individual) assignment.get(key); } class ObjectEdge { OWLObjectPropertyExpression p; - Term v; - + Term v; + public ObjectEdge(AtomicRole r, Term t, boolean inverse) { - p = factory.getOWLObjectProperty(IRI.create(r.getIRI())); - if (inverse) p = p.getInverseProperty(); - v = t; + p = factory.getOWLObjectProperty(IRI.create(r.getIRI())); + if(inverse) p = p.getInverseProperty(); + v = t; } } class MultiMap { - - HashMap> map = new HashMap>(); - - void add(K key, V value) { - Set list = map.get(key); - if (list == null) - map.put(key, list = new HashSet()); - list.add(value); - } + + HashMap> map = new HashMap>(); public Set get(K v) { - return map.get(v); + return map.get(v); } public boolean isEmpty() { - for (Map.Entry> entry: map.entrySet()) - if (!entry.getValue().isEmpty()) - return false; + for(Map.Entry> entry : map.entrySet()) + if(!entry.getValue().isEmpty()) + return false; return true; } - - } - public Set getPropertyAssertions(Map assignment) { - OWLIndividual sub, obj; - Set axioms = new HashSet(); - for (Map.Entry> entry: edges.map.entrySet()) { - sub = factory.getOWLNamedIndividual(IRI.create(getIndividual(entry.getKey(), assignment).getIRI())); - for (ObjectEdge edge: entry.getValue()) { - obj = factory.getOWLNamedIndividual(IRI.create(getIndividual(edge.v, assignment).getIRI())); - axioms.add(factory.getOWLObjectPropertyAssertionAxiom(edge.p, sub, obj)); - } - } - return axioms; - } - - public Set getAssertions(Map assignment) { - if (!rollable_edges.isEmpty()) return null; - - OWLIndividual sub; - Visitor visitor = new Visitor(factory, assignment); - Set axioms = getPropertyAssertions(assignment); - for (Map.Entry> entry: concepts.map.entrySet()) { - if (existVars.contains(entry.getKey())) continue; - sub = factory.getOWLNamedIndividual(IRI.create(getIndividual(entry.getKey(), assignment).getIRI())); - for (OWLClassExpression clsExp: entry.getValue()) { - axioms.add(factory.getOWLClassAssertionAxiom(clsExp.accept(visitor), sub)); - } + void add(K key, V value) { + Set list = map.get(key); + if(list == null) + map.put(key, list = new HashSet()); + list.add(value); } - return axioms; - } - private Individual getIndividual(Term key, Map assignment) { - if (key instanceof Individual) - return (Individual) key; - else - return (Individual) 
assignment.get(key); } } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java index 453b5ca..3fd2fbd 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java @@ -23,6 +23,7 @@ import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; import uk.ac.ox.cs.pagoda.util.disposable.Disposable; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; import java.util.LinkedList; @@ -49,6 +50,8 @@ public class ConsistencyManager extends Disposable { } public void extractBottomFragment() { + if(isDisposed()) throw new DisposedException(); + if(fragmentExtracted) return; fragmentExtracted = true; @@ -141,10 +144,14 @@ public class ConsistencyManager extends Disposable { } public QueryRecord[] getQueryRecords() { + if(isDisposed()) throw new DisposedException(); + return botQueryRecords; } boolean checkRLLowerBound() { + if(isDisposed()) throw new DisposedException(); + fullQueryRecord = m_queryManager.create(QueryRecord.botQueryText, 0); AnswerTuples iter = null; @@ -175,6 +182,8 @@ public class ConsistencyManager extends Disposable { // } boolean checkELLowerBound() { + if(isDisposed()) throw new DisposedException(); + fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord .getAnswerVariables())); if(fullQueryRecord.getNoOfSoundAnswers() > 0) { @@ -185,6 +194,8 @@ public class ConsistencyManager extends Disposable { } boolean checkUpper(BasicQueryEngine upperStore) { + if(isDisposed()) throw new DisposedException(); + if(upperStore != null) { AnswerTuples tuples = null; try { @@ -202,6 +213,8 @@ public class ConsistencyManager extends Disposable { } boolean check() { + if(isDisposed()) throw new DisposedException(); + // if (!checkRLLowerBound()) return false; // if (!checkELLowerBound()) return false; // if (checkLazyUpper()) return true; diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java index 0a151bc..cc2c4c0 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java @@ -70,7 +70,7 @@ class ELHOQueryReasoner extends QueryReasoner { @Override public boolean preprocess() { if(isDisposed()) throw new DisposedException(); - elLowerStore.importRDFData("data", importedData.toString()); + elLowerStore.importRDFData("data", getImportedData()); String rlLowerProgramText = program.toString(); // program.save(); elLowerStore.materialise("lower program", rlLowerProgramText); diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java index 771190e..a56a793 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java @@ -129,7 +129,7 @@ class ELHOUQueryReasoner extends QueryReasoner { @Override public boolean preprocess() { if(isDisposed()) throw new DisposedException(); - String name = "data", datafile = importedData.toString(); + String name = "data", datafile = getImportedData(); String lowername = "lower program"; String rlLowerProgramText = program.getLower().toString(); diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java index 78b9a0b..15eb9e8 100644 --- 
a/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java @@ -46,7 +46,7 @@ class HermiTReasoner extends QueryReasoner { if(isDisposed()) throw new DisposedException(); OWLOntology tbox = onto; try { - onto = OWLHelper.getImportedOntology(tbox, importedData.toString().split(ImportDataFileSeparator)); + onto = OWLHelper.getImportedOntology(tbox, getImportedData().split(ImportDataFileSeparator)); importedOntologyPath = OWLHelper.getOntologyPath(onto); } catch(OWLOntologyCreationException | OWLOntologyStorageException | IOException e) { e.printStackTrace(); @@ -55,7 +55,7 @@ class HermiTReasoner extends QueryReasoner { DatalogProgram datalogProgram = new DatalogProgram(tbox, false); importData(datalogProgram.getAdditionalDataFile()); upperStore = new MultiStageQueryEngine("rl-upper", false); - upperStore.importRDFData("data", importedData.toString()); + upperStore.importRDFData("data", getImportedData()); GapByStore4ID gap = new GapByStore4ID(upperStore); upperStore.materialiseFoldedly(datalogProgram, gap); gap.clear(); diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java index acdb8a3..b4e2f5a 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java @@ -29,20 +29,22 @@ import java.util.Collection; class MyQueryReasoner extends QueryReasoner { OWLOntology ontology; + OWLOntology elho_ontology; DatalogProgram program; BasicQueryEngine rlLowerStore = null; - BasicQueryEngine lazyUpperStore = null; - // MultiStageQueryEngine limitedSkolemUpperStore; - OWLOntology elho_ontology; KarmaQueryEngine elLowerStore = null; - BasicQueryEngine trackingStore = null; + MultiStageQueryEngine lazyUpperStore = null; + MultiStageQueryEngine trackingStore = null; TrackingRuleEncoder encoder; + private boolean equalityTag; private Timer t = new Timer(); + private Collection predicatesWithGap = null; - private SatisfiabilityStatus satisfiable; + private ConsistencyStatus isConsistent; private ConsistencyManager consistency = new ConsistencyManager(this); + private boolean useSkolemisation = false; // now only debugging public MyQueryReasoner() { setup(true); @@ -92,11 +94,15 @@ class MyQueryReasoner extends QueryReasoner { t.reset(); Utility.logInfo("Preprocessing (and checking satisfiability)..."); - String name = "data", datafile = importedData.toString(); + String name = "data", datafile = getImportedData(); rlLowerStore.importRDFData(name, datafile); rlLowerStore.materialise("lower program", program.getLower().toString()); // program.getLower().save(); - if(!consistency.checkRLLowerBound()) return false; + if(!consistency.checkRLLowerBound()) { + Utility.logDebug("time for satisfiability checking: " + t.duration()); + isConsistent = ConsistencyStatus.INCONSISTENT; + return false; + } Utility.logDebug("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber()); String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); @@ -105,20 +111,28 @@ class MyQueryReasoner extends QueryReasoner { elLowerStore.materialise("saturate named individuals", originalMarkProgram); elLowerStore.materialise("lower program", program.getLower().toString()); elLowerStore.initialiseKarma(); - if(!consistency.checkELLowerBound()) return false; + if(!consistency.checkELLowerBound()) { + Utility.logDebug("time for satisfiability checking: " + t.duration()); + isConsistent = ConsistencyStatus.INCONSISTENT; + 
return false; + } if(lazyUpperStore != null) { lazyUpperStore.importRDFData(name, datafile); lazyUpperStore.materialise("saturate named individuals", originalMarkProgram); int tag = lazyUpperStore.materialiseRestrictedly(program, null); - if(tag != 1) { + if(tag == -1) { + Utility.logDebug("time for satisfiability checking: " + t.duration()); + isConsistent = ConsistencyStatus.INCONSISTENT; + return false; + } + else if(tag != 1) { lazyUpperStore.dispose(); lazyUpperStore = null; } - if(tag == -1) return false; } if(consistency.checkUpper(lazyUpperStore)) { - satisfiable = SatisfiabilityStatus.SATISFIABLE; + isConsistent = ConsistencyStatus.CONSISTENT; Utility.logDebug("time for satisfiability checking: " + t.duration()); } @@ -140,13 +154,12 @@ class MyQueryReasoner extends QueryReasoner { // encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore); // encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore); + // TODO add consistency check by Skolem-upper-bound + if(!isConsistent()) return false; consistency.extractBottomFragment(); - consistency.dispose(); - - program.dispose(); return true; } @@ -154,11 +167,19 @@ class MyQueryReasoner extends QueryReasoner { @Override public boolean isConsistent() { if(isDisposed()) throw new DisposedException(); - if(satisfiable == SatisfiabilityStatus.UNCHECKED) { - satisfiable = consistency.check() ? SatisfiabilityStatus.SATISFIABLE : SatisfiabilityStatus.UNSATISFIABLE; - Utility.logInfo("time for satisfiability checking: " + t.duration()); + + if(isConsistent == ConsistencyStatus.UNCHECKED) { + isConsistent = consistency.check() ? ConsistencyStatus.CONSISTENT : ConsistencyStatus.INCONSISTENT; + Utility.logDebug("time for satisfiability checking: " + t.duration()); + } + if(isConsistent == ConsistencyStatus.CONSISTENT) { + Utility.logInfo("The ontology is consistent!"); + return true; + } + else { + Utility.logInfo("The ontology is inconsistent!"); + return false; } - return satisfiable == SatisfiabilityStatus.SATISFIABLE; } @Override @@ -169,9 +190,9 @@ class MyQueryReasoner extends QueryReasoner { return; OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord); -// queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); + queryRecord.saveRelevantOntology("/home/alessandro/Desktop/fragment_query" + queryRecord.getQueryID() + ".owl"); - if(querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) + if(useSkolemisation && querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) return; Timer t = new Timer(); @@ -207,12 +228,14 @@ class MyQueryReasoner extends QueryReasoner { if(lazyUpperStore != null) lazyUpperStore.dispose(); if(elLowerStore != null) elLowerStore.dispose(); if(trackingStore != null) trackingStore.dispose(); -// if(limitedSkolemUpperStore != null) limitedSkolemUpperStore.dispose(); + if(consistency != null) consistency.dispose(); + if(program != null) program.dispose(); } private void setup(boolean considerEqualities) { if(isDisposed()) throw new DisposedException(); - satisfiable = SatisfiabilityStatus.UNCHECKED; + + isConsistent = ConsistencyStatus.UNCHECKED; this.equalityTag = considerEqualities; rlLowerStore = new BasicQueryEngine("rl-lower-bound"); @@ -239,7 +262,7 @@ class MyQueryReasoner extends QueryReasoner { */ private boolean queryUpperStore(BasicQueryEngine upperStore, QueryRecord queryRecord, Tuple extendedQuery, Step step) { - + t.reset(); if(queryRecord.hasNonAnsDistinguishedVariables()) 
queryUpperBound(upperStore, queryRecord, extendedQuery.get(0), queryRecord.getAnswerVariables()); else @@ -254,6 +277,7 @@ class MyQueryReasoner extends QueryReasoner { } private boolean checkGapAnswers(BasicQueryEngine relevantStore, QueryRecord queryRecord) { + t.reset(); Tuple extendedQueries = queryRecord.getExtendedQueryText(); if(queryRecord.hasNonAnsDistinguishedVariables()) checkGapAnswers(relevantStore, queryRecord, extendedQueries.get(0), queryRecord.getAnswerVariables()); @@ -296,8 +320,6 @@ class MyQueryReasoner extends QueryReasoner { } queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration()); - t.reset(); - Tuple extendedQueryTexts = queryRecord.getExtendedQueryText(); Utility.logDebug("Tracking store"); @@ -343,8 +365,6 @@ class MyQueryReasoner extends QueryReasoner { // just statistics int numOfABoxAxioms = relevantOntologySubset.getABoxAxioms(true).size(); int numOfTBoxAxioms = relevantOntologySubset.getAxiomCount() - numOfABoxAxioms; - int originalNumOfABoxAxioms = ontology.getABoxAxioms(true).size(); - int originalNumOfTBoxAxioms = ontology.getAxiomCount() - originalNumOfABoxAxioms; Utility.logInfo("Relevant ontology-subset has been extracted: |ABox|=" + numOfABoxAxioms + ", |TBox|=" + numOfTBoxAxioms); @@ -365,6 +385,7 @@ class MyQueryReasoner extends QueryReasoner { private boolean querySkolemisedRelevantSubset(OWLOntology relevantSubset, QueryRecord queryRecord) { Utility.logInfo("Evaluating semi-Skolemised relevant upper store..."); + t.reset(); DatalogProgram relevantProgram = new DatalogProgram(relevantSubset, false); // toClassify is false @@ -373,16 +394,24 @@ class MyQueryReasoner extends QueryReasoner { relevantStore.importDataFromABoxOf(relevantSubset); - int materialisationResult = relevantStore.materialiseSkolemly(relevantProgram, null); - if(materialisationResult != 1) - throw new RuntimeException("Skolemised materialisation error"); // TODO check consistency + int queryDependentMaxTermDepth = 1; // TODO make it dynamic + int materialisationTag = relevantStore.materialiseSkolemly(relevantProgram, null, + queryDependentMaxTermDepth); + queryRecord.addProcessingTime(Step.L_SKOLEM_UPPER_BOUND, t.duration()); + if(materialisationTag == -1) { + throw new Error("A consistent ontology has turned out to be " + + "inconsistent in the Skolemises-relevant-upper-store"); + } + else if(materialisationTag != 1) { + Utility.logInfo("Semi-Skolemised relevant upper store cannot be employed"); + return false; + } boolean isFullyProcessed = checkGapAnswers(relevantStore, queryRecord); - Utility.logInfo("Semi-Skolemised relevant upper store has been evaluated"); return isFullyProcessed; } - enum SatisfiabilityStatus {SATISFIABLE, UNSATISFIABLE, UNCHECKED} + private enum ConsistencyStatus {CONSISTENT, INCONSISTENT, UNCHECKED} } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java index 962a78f..eab6a1b 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java @@ -26,10 +26,10 @@ public abstract class QueryReasoner extends Disposable { private static final boolean DEFAULT_MULTI_STAGES = true; private static final boolean DEFAULT_EQUALITIES = true; public boolean fullReasoner = this instanceof MyQueryReasoner; - protected StringBuilder importedData = new StringBuilder(); // protected boolean forSemFacet = false; PagodaProperties properties; BufferedWriter answerWriter = null; + private StringBuilder importedData = new StringBuilder(); private 
QueryManager m_queryManager = new QueryManager(); public static QueryReasoner getInstance(PagodaProperties p) { @@ -208,10 +208,6 @@ public abstract class QueryReasoner extends Disposable { queryRecords.stream().forEach(record -> record.dispose()); } -// public void evaluate(Collection queryRecords) { -// evaluate(queryRecords); -// } - @Override public void dispose() { super.dispose(); @@ -225,11 +221,19 @@ public abstract class QueryReasoner extends Disposable { // Utility.cleanup(); } +// public void evaluate(Collection queryRecords) { +// evaluate(queryRecords); +// } + public QueryManager getQueryManager() { if(isDisposed()) throw new DisposedException(); return m_queryManager; } + protected String getImportedData() { + return importedData.toString(); + } + private void importDataDirectory(File file) { for(File child : file.listFiles()) if(child.isFile()) importDataFile(child); diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java index 16e2627..3934498 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java @@ -57,7 +57,7 @@ class RLQueryReasoner extends QueryReasoner { @Override public boolean preprocess() { if(isDisposed()) throw new DisposedException(); - rlLowerStore.importRDFData("data", importedData.toString()); + rlLowerStore.importRDFData("data", getImportedData()); rlLowerStore.materialise("lower program", program.toString()); return isConsistent(); diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java index d0712e1..368fbb2 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java @@ -97,7 +97,7 @@ class RLUQueryReasoner extends QueryReasoner { @Override public boolean preprocess() { if(isDisposed()) throw new DisposedException(); - String datafile = importedData.toString(); + String datafile = getImportedData(); rlLowerStore.importRDFData("data", datafile); rlLowerStore.materialise("lower program", program.getLower().toString()); diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java b/src/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java index 849b971..fd620a5 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java @@ -2,14 +2,14 @@ package uk.ac.ox.cs.pagoda.reasoner.full; import uk.ac.ox.cs.pagoda.query.AnswerTuple; import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.util.disposable.Disposable; -public interface Checker { +public abstract class Checker extends Disposable { - public int check(AnswerTuples answers); - - public boolean check(AnswerTuple answer); + public abstract int check(AnswerTuples answers); - public boolean isConsistent(); + public abstract boolean check(AnswerTuple answer); + + public abstract boolean isConsistent(); - public void dispose(); } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/full/HermitChecker.java b/src/uk/ac/ox/cs/pagoda/reasoner/full/HermitChecker.java index 5dcf0f8..35db0f2 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/full/HermitChecker.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/full/HermitChecker.java @@ -1,24 +1,10 @@ package uk.ac.ox.cs.pagoda.reasoner.full; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import org.semanticweb.HermiT.Reasoner; import org.semanticweb.HermiT.model.DLClause; import 
org.semanticweb.HermiT.model.Term; import org.semanticweb.HermiT.model.Variable; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLIndividual; -import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyManager; - +import org.semanticweb.owlapi.model.*; import uk.ac.ox.cs.pagoda.endomorph.Clique; import uk.ac.ox.cs.pagoda.endomorph.DependencyGraph; import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; @@ -30,216 +16,236 @@ import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; import uk.ac.ox.cs.pagoda.util.Namespace; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; -public class HermitChecker implements Checker { - - protected OWLDataFactory factory; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; - private String queryText; - private DLClause queryClause; +public class HermitChecker extends Checker { - private Reasoner hermit; + protected OWLDataFactory factory; protected String[][] answerVariable; protected OWLOntology ontology; - protected QueryRecord record; - protected QueryGraph qGraph = null; + protected QueryRecord record; + protected QueryGraph qGraph = null; boolean toCheck = true; - + AnswerTuple topAnswerTuple = null, botAnswerTuple = null; + private String queryText; + private DLClause queryClause; + private Reasoner hermit; + private int tag = 0; + private int counter = 0; + private DependencyGraph dGraph = null; public HermitChecker(Checker checker) { if (checker instanceof HermitChecker) { HermitChecker other = (HermitChecker) checker; - factory = other.factory; + factory = other.factory; queryText = other.queryText; - queryClause = other.queryClause; + queryClause = other.queryClause; answerVariable = other.answerVariable; ontology = other.ontology; -// record = other.record; +// record = other.record; } - + hermit = new Reasoner(ontology); } - + public HermitChecker(OWLOntology ontology, QueryRecord record, boolean toCheck) { this.ontology = ontology; - queryText = record.getQueryText(); + queryText = record.getQueryText(); answerVariable = record.getVariables(); - queryClause = record.getClause(); -// this.record = record; - this.toCheck = toCheck; + queryClause = record.getClause(); +// this.record = record; + this.toCheck = toCheck; } - + public HermitChecker(OWLOntology ontology, String queryText) { this.ontology = ontology; - this.queryText = queryText; + this.queryText = queryText; answerVariable = queryText == null ? 
null : ConjunctiveQueryHelper.getAnswerVariables(queryText); - queryClause = DLClauseHelper.getQuery(queryText, null); -// this.record = null; + queryClause = DLClauseHelper.getQuery(queryText, null); +// this.record = null; + } + + @Override + public int check(AnswerTuples answers) { + if(isDisposed()) throw new DisposedException(); + + if(hermit == null) initialiseReasoner(); + int answerCounter = 0, counter = 0; + for(; answers.isValid(); answers.moveNext()) { + ++counter; + if(check(answers.getTuple())) ++answerCounter; + } + answers.dispose(); + + Utility.logDebug("The number of individuals to be checked by HermiT: " + counter, + "The number of correct answers: " + answerCounter); + return answerCounter; + } + + @Override + public boolean check(AnswerTuple answerTuple) { + if(isDisposed()) throw new DisposedException(); + + if(!toCheck) return false; + + if(hermit == null) initialiseReasoner(); + if(tag != 0) return tag == 1; + ++counter; + Timer t = new Timer(); + Map sub = answerTuple.getAssignment(answerVariable[1]); + Set toCheckAxioms = qGraph.getAssertions(sub); + +// for (OWLAxiom axiom: toCheckAxioms) System.out.println(axiom.toString()); + + if(hermit.isEntailed(toCheckAxioms)) { + Utility.logDebug("@TIME to check one tuple: " + t.duration()); + return true; + } + Utility.logDebug("@TIME to check one tuple: " + t.duration()); + return false; } - private int tag = 0; - AnswerTuple topAnswerTuple = null, botAnswerTuple = null; - + @Override + public boolean isConsistent() { + if(isDisposed()) throw new DisposedException(); + + if(hermit == null) initialiseReasoner(); + return hermit.isConsistent(); + } + + public void dispose() { + super.dispose(); + + Utility.logInfo("Hermit was called " + counter + " times."); + if(hermit != null) hermit.dispose(); + hermit = null; + } + + public void setDependencyGraph(DependencyGraph dGraph) { + if(isDisposed()) throw new DisposedException(); + + this.dGraph = dGraph; + } + private void initialiseReasoner() { - qGraph = new QueryGraph(queryClause.getBodyAtoms(), answerVariable[1], ontology); - OWLOntologyManager manager = ontology.getOWLOntologyManager(); + qGraph = new QueryGraph(queryClause.getBodyAtoms(), answerVariable[1], ontology); + OWLOntologyManager manager = ontology.getOWLOntologyManager(); factory = manager.getOWLDataFactory(); - - if (hermit != null) hermit.dispose(); - - if (dGraph != null && answerVariable[1].length == 1 && (dGraph.getExits().size() > 1 || dGraph.getEntrances().size() > 1)) { - Set topAxioms = new HashSet(); - Set botAxioms = new HashSet(); + + if(hermit != null) hermit.dispose(); + + if(dGraph != null && answerVariable[1].length == 1 && (dGraph.getExits().size() > 1 || dGraph.getEntrances() + .size() > 1)) { + Set topAxioms = new HashSet(); + Set botAxioms = new HashSet(); addTopAndBotTuple(topAxioms, botAxioms); - manager.addAxioms(ontology, topAxioms); - manager.addAxioms(ontology, botAxioms); + manager.addAxioms(ontology, topAxioms); + manager.addAxioms(ontology, botAxioms); hermit = new Reasoner(ontology); - boolean topValid = true; - if (!hermit.isConsistent() || topAnswerTuple != null && (topValid = check(topAnswerTuple))) { + boolean topValid = true; + if(!hermit.isConsistent() || topAnswerTuple != null && (topValid = check(topAnswerTuple))) { hermit.dispose(); - manager.removeAxioms(ontology, topAxioms); - hermit = new Reasoner(ontology); - } else { - if (!topValid) tag = -1; - else - if (botAnswerTuple != null && check(botAnswerTuple)) tag = 1; + manager.removeAxioms(ontology, topAxioms); + hermit = 
new Reasoner(ontology); + } + else { + if(!topValid) tag = -1; + else if(botAnswerTuple != null && check(botAnswerTuple)) tag = 1; } } - else + else hermit = new Reasoner(ontology); } - + private void addTopAndBotTuple(Set topAxioms, Set botAxioms) { String top_str = Namespace.PAGODA_ANONY + "top", bot_str = Namespace.PAGODA_ANONY + "bot"; - topAnswerTuple = new AnswerTuple(new uk.ac.ox.cs.JRDFox.model.Individual[] { uk.ac.ox.cs.JRDFox.model.Individual.create(top_str) } ); - botAnswerTuple = new AnswerTuple(new uk.ac.ox.cs.JRDFox.model.Individual[] { uk.ac.ox.cs.JRDFox.model.Individual.create(bot_str) } ); - OWLIndividual top_ind = factory.getOWLNamedIndividual(IRI.create(top_str)), bot_ind = factory.getOWLNamedIndividual(IRI.create(bot_str)); - Map counter = new HashMap(); - + topAnswerTuple = + new AnswerTuple(new uk.ac.ox.cs.JRDFox.model.Individual[]{uk.ac.ox.cs.JRDFox.model.Individual.create(top_str)}); + botAnswerTuple = + new AnswerTuple(new uk.ac.ox.cs.JRDFox.model.Individual[]{uk.ac.ox.cs.JRDFox.model.Individual.create(bot_str)}); + OWLIndividual top_ind = factory.getOWLNamedIndividual(IRI.create(top_str)), bot_ind = + factory.getOWLNamedIndividual(IRI.create(bot_str)); + Map counter = new HashMap(); + Set topAnswers = new HashSet(), botAnswers = new HashSet(); - OWLIndividual sub, obj; - if (dGraph.getExits().size() > 1) { - for (Clique answerClique: dGraph.getExits()) - topAnswers.add(((uk.ac.ox.cs.JRDFox.model.Individual) answerClique.getRepresentative().getAnswerTuple().getGroundTerm(0)).getIRI()); + OWLIndividual sub, obj; + if(dGraph.getExits().size() > 1) { + for(Clique answerClique : dGraph.getExits()) + topAnswers.add(((uk.ac.ox.cs.JRDFox.model.Individual) answerClique.getRepresentative() + .getAnswerTuple() + .getGroundTerm(0)).getIRI()); } - else topAnswerTuple = null; - - if (dGraph.getEntrances().size() > 1) { - for (Clique answerClique: dGraph.getEntrances()) - botAnswers.add(((uk.ac.ox.cs.JRDFox.model.Individual) answerClique.getRepresentative().getAnswerTuple().getGroundTerm(0)).getIRI()); + else topAnswerTuple = null; + + if(dGraph.getEntrances().size() > 1) { + for(Clique answerClique : dGraph.getEntrances()) + botAnswers.add(((uk.ac.ox.cs.JRDFox.model.Individual) answerClique.getRepresentative() + .getAnswerTuple() + .getGroundTerm(0)).getIRI()); } - else botAnswerTuple = null; - - for (OWLAxiom axiom: ontology.getABoxAxioms(true)) - if (axiom instanceof OWLClassAssertionAxiom) { + else botAnswerTuple = null; + + for(OWLAxiom axiom : ontology.getABoxAxioms(true)) + if(axiom instanceof OWLClassAssertionAxiom) { OWLClassAssertionAxiom ca = (OWLClassAssertionAxiom) axiom; sub = ca.getIndividual(); - if (topAnswers.contains(sub.toStringID())) + if(topAnswers.contains(sub.toStringID())) topAxioms.add(factory.getOWLClassAssertionAxiom(ca.getClassExpression(), top_ind)); - if (botAnswers.contains(sub.toStringID())) + if(botAnswers.contains(sub.toStringID())) inc(counter, factory.getOWLClassAssertionAxiom(ca.getClassExpression(), bot_ind)); } - else if (axiom instanceof OWLObjectPropertyAssertionAxiom) { - OWLObjectPropertyAssertionAxiom oa = (OWLObjectPropertyAssertionAxiom) axiom; - sub = oa.getSubject(); obj = oa.getObject(); - if (topAnswers.contains(sub.toStringID())) - if (topAnswers.contains(obj.toStringID())) + else if(axiom instanceof OWLObjectPropertyAssertionAxiom) { + OWLObjectPropertyAssertionAxiom oa = (OWLObjectPropertyAssertionAxiom) axiom; + sub = oa.getSubject(); + obj = oa.getObject(); + if(topAnswers.contains(sub.toStringID())) + 
if(topAnswers.contains(obj.toStringID())) topAxioms.add(factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), top_ind, top_ind)); - else + else topAxioms.add(factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), top_ind, obj)); else { - if (topAnswers.contains(obj.toStringID())) + if(topAnswers.contains(obj.toStringID())) topAxioms.add(factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), sub, top_ind)); } - - if (botAnswers.contains(sub.toStringID())) - if (botAnswers.contains(obj.toStringID())) + + if(botAnswers.contains(sub.toStringID())) + if(botAnswers.contains(obj.toStringID())) inc(counter, factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), bot_ind, bot_ind)); - else + else inc(counter, factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), bot_ind, obj)); else { - if (botAnswers.contains(obj.toStringID())) + if(botAnswers.contains(obj.toStringID())) inc(counter, factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), sub, bot_ind)); } - + } - else if (axiom instanceof OWLDataPropertyAssertionAxiom) { - OWLDataPropertyAssertionAxiom da = (OWLDataPropertyAssertionAxiom) axiom; - sub = da.getSubject(); - if (topAnswers.contains(sub.toStringID())) + else if(axiom instanceof OWLDataPropertyAssertionAxiom) { + OWLDataPropertyAssertionAxiom da = (OWLDataPropertyAssertionAxiom) axiom; + sub = da.getSubject(); + if(topAnswers.contains(sub.toStringID())) topAxioms.add(factory.getOWLDataPropertyAssertionAxiom(da.getProperty(), top_ind, da.getObject())); - - if (botAnswers.contains(sub.toStringID())) + + if(botAnswers.contains(sub.toStringID())) inc(counter, factory.getOWLDataPropertyAssertionAxiom(da.getProperty(), bot_ind, da.getObject())); } - - int number = botAnswers.size(); - for (Map.Entry entry: counter.entrySet()) { - if (entry.getValue() == number) + + int number = botAnswers.size(); + for(Map.Entry entry : counter.entrySet()) { + if(entry.getValue() == number) botAxioms.add(entry.getKey()); } } private void inc(Map counter, OWLAxiom newAxiom) { - Integer number = counter.get(newAxiom); - if (number == null) counter.put(newAxiom, 1); - else counter.put(newAxiom, number + 1); - } + if(isDisposed()) throw new DisposedException(); - @Override - public int check(AnswerTuples answers) { - if (hermit == null) initialiseReasoner(); - int answerCounter = 0, counter = 0; - for (; answers.isValid(); answers.moveNext()) { - ++counter; - if (check(answers.getTuple())) ++answerCounter; - } - answers.dispose(); - - Utility.logDebug("The number of individuals to be checked by HermiT: " + counter, - "The number of correct answers: " + answerCounter); - return answerCounter; - } - - private int counter = 0; - - @Override - public boolean check(AnswerTuple answerTuple) { - if (!toCheck) return false; - - if (hermit == null) initialiseReasoner(); - if (tag != 0) return tag == 1; - ++counter; - Timer t = new Timer(); - Map sub = answerTuple.getAssignment(answerVariable[1]); - Set toCheckAxioms = qGraph.getAssertions(sub); - -// for (OWLAxiom axiom: toCheckAxioms) System.out.println(axiom.toString()); - - if (hermit.isEntailed(toCheckAxioms)) { - Utility.logDebug("@TIME to check one tuple: " + t.duration()); - return true; - } - Utility.logDebug("@TIME to check one tuple: " + t.duration()); - return false; - } - - @Override - public boolean isConsistent() { - if (hermit == null) initialiseReasoner(); - return hermit.isConsistent(); - } - - - public void dispose() { - Utility.logInfo("Hermit was called " + counter + " times."); - if (hermit != null) 
hermit.dispose(); - hermit = null; - } - - private DependencyGraph dGraph = null; - - public void setDependencyGraph(DependencyGraph dGraph) { - this.dGraph = dGraph; + Integer number = counter.get(newAxiom); + if(number == null) counter.put(newAxiom, 1); + else counter.put(newAxiom, number + 1); } } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java index fe43e09..107d3ca 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java @@ -104,16 +104,22 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public void outputInstance4BinaryPredicate(String iri, String filename) { + if(isDisposed()) throw new DisposedException(); + Utility.redirectCurrentOut(filename); outputInstance4BinaryPredicate(iri); Utility.closeCurrentOut(); } public void outputInstance4BinaryPredicate(String iri) { + if(isDisposed()) throw new DisposedException(); + outputAnswers("select ?x ?y where { ?x <" + iri + "> ?y . }"); } public void outputInstanceNumbers(String filename) { + if(isDisposed()) throw new DisposedException(); + TupleIterator predicateTuples = null; TupleIterator instanceTuples; Set number = new HashSet(); @@ -177,6 +183,8 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public TupleIterator internal_evaluateAgainstIDBs(String queryText) throws JRDFStoreException { + if(isDisposed()) throw new DisposedException(); + TupleIterator iter = store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB); // iter.open(); @@ -184,16 +192,22 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public TupleIterator internal_evaluate(String queryText) throws JRDFStoreException { + if(isDisposed()) throw new DisposedException(); + TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); // iter.open(); return iter; } public void setExpandEquality(boolean flag) { + if(isDisposed()) throw new DisposedException(); + parameters.m_expandEquality = flag; } public TupleIterator internal_evaluateNotExpanded(String queryText) throws JRDFStoreException { + if(isDisposed()) throw new DisposedException(); + parameters.m_expandEquality = false; TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); // iter.open(); @@ -202,10 +216,14 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public TupleIterator internal_evaluate(String queryText, boolean incrementally) throws JRDFStoreException { + if(isDisposed()) throw new DisposedException(); + return incrementally ? 
internal_evaluateAgainstIDBs(queryText) : internal_evaluate(queryText); } public String getUnusedRules(Collection clauses, boolean toUpdate) { + if(isDisposed()) throw new DisposedException(); + DLClause clause; for(Iterator iter = clauses.iterator(); iter.hasNext(); ) { if(materialisedRules.contains(clause = iter.next())) @@ -219,10 +237,14 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public void outputMaterialisedRules() { + if(isDisposed()) throw new DisposedException(); + System.out.println(DLClauseHelper.toString(materialisedRules)); } public void outputAnswers(String query) { + if(isDisposed()) throw new DisposedException(); + TupleIterator iter = null; try { iter = internal_evaluate(query); @@ -241,6 +263,8 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public void outputInstance4UnaryPredicate(String iri) { + if(isDisposed()) throw new DisposedException(); + outputAnswers("select ?x where { ?x " + " <" + iri @@ -248,14 +272,20 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public void outputSubjects(String p, String o) { + if(isDisposed()) throw new DisposedException(); + outputAnswers("select x where { ?x <" + p + "> <" + o + "> . }"); } public void outputObjects(String s, String p) { + if(isDisposed()) throw new DisposedException(); + outputAnswers("select ?x where { <" + s + "> <" + p + "> ?x . }"); } public void outputIDBFacts() { + if(isDisposed()) throw new DisposedException(); + TupleIterator iter = null; try { iter = internal_evaluateAgainstIDBs("select distict ?x ?y ?z where { ?x ?y ?z }"); @@ -274,10 +304,14 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public void outputType4Individual(String iri) { + if(isDisposed()) throw new DisposedException(); + outputAnswers("select ?z where { <" + iri + "> " + Namespace.RDF_TYPE_QUOTED + " ?z }"); } public int getSameAsNumber() { + if(isDisposed()) throw new DisposedException(); + TupleIterator iter = null; int counter = 0; try { @@ -294,6 +328,8 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public UFS getEqualityGroups(boolean reuse) { + if(isDisposed()) throw new DisposedException(); + if(reuse && equalityGroups != null) return equalityGroups; equalityGroups = new UFS(); @@ -317,6 +353,8 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public void clearRulesAndIDBFacts(Collection collection) { + if(isDisposed()) throw new DisposedException(); + // performDeletion(collection); collection.clear(); try { diff --git a/src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java b/src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java index 3f1ed7e..5f6e362 100644 --- a/src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java +++ b/src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java @@ -3,6 +3,7 @@ package uk.ac.ox.cs.pagoda.rules.approximators; import org.semanticweb.HermiT.model.*; import uk.ac.ox.cs.pagoda.multistage.MultiStageUpperProgram; import uk.ac.ox.cs.pagoda.rules.ExistConstantApproximator; +import uk.ac.ox.cs.pagoda.util.Utility; import uk.ac.ox.cs.pagoda.util.tuples.Tuple; import uk.ac.ox.cs.pagoda.util.tuples.TupleBuilder; @@ -53,12 +54,22 @@ public class LimitedSkolemisationApproximator implements TupleDependentApproxima } + public int getMaxDepth(Tuple violationTuple) { + int maxDepth = 0; + for(Individual individual : violationTuple) + maxDepth = Integer.max(maxDepth, skolemTermsManager.getDepthOf(individual)); + + return 
maxDepth; + } + private Collection overApprox(DLClause clause, DLClause originalClause, Collection> violationTuples) { ArrayList result = new ArrayList<>(); for (Tuple violationTuple : violationTuples) - if (getMaxDepth(violationTuple) > maxTermDepth) + if(getMaxDepth(violationTuple) > maxTermDepth) { result.addAll(alternativeApproximator.convert(clause, originalClause, null)); + Utility.logDebug("Approximating maximal individual by a constant in rule:" + originalClause); + } else result.addAll(getGroundSkolemisation(clause, originalClause, violationTuple)); @@ -138,13 +149,4 @@ public class LimitedSkolemisationApproximator implements TupleDependentApproxima // END: copy and paste } - - - public int getMaxDepth(Tuple violationTuple) { - int maxDepth = 0; - for (Individual individual : violationTuple) - maxDepth = Integer.max(maxDepth, skolemTermsManager.getDepthOf(individual)); - - return maxDepth; - } } diff --git a/src/uk/ac/ox/cs/pagoda/summary/HermitSummaryFilter.java b/src/uk/ac/ox/cs/pagoda/summary/HermitSummaryFilter.java index 90a2ed4..f3f08e9 100644 --- a/src/uk/ac/ox/cs/pagoda/summary/HermitSummaryFilter.java +++ b/src/uk/ac/ox/cs/pagoda/summary/HermitSummaryFilter.java @@ -14,11 +14,12 @@ import uk.ac.ox.cs.pagoda.reasoner.full.HermitChecker; import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; import java.util.HashSet; import java.util.Set; -public class HermitSummaryFilter implements Checker { +public class HermitSummaryFilter extends Checker { public static final String QueryAnswerTermPrefix = TrackingRuleEncoder.QueryPredicate + "_term"; QueryRecord m_record; @@ -97,6 +98,8 @@ public class HermitSummaryFilter implements Checker { @Override public boolean isConsistent() { + if(isDisposed()) throw new DisposedException(); + if (summary == null) summary = new Summary(endomorphismChecker.getOntology(), endomorphismChecker.getGraph()); @@ -107,31 +110,22 @@ public class HermitSummaryFilter implements Checker { return endomorphismChecker.isConsistent(); } - private void initialiseSummarisedReasoner() { - Timer t = new Timer(); - summarisedHermiT = new HermitChecker(summary.getSummary(), summary.getSummary(m_record)); -// summary.save("summarised_query" + m_record.getQueryID() + ".owl"); - if(summarisedConsistency = summarisedHermiT.isConsistent()) - Utility.logDebug("The summary of ABox is consistent with the TBox."); - else - Utility.logDebug("The summary of ABox is NOT consistent with the TBox."); - m_record.addProcessingTime(Step.SUMMARISATION, t.duration()); - } - @Override public int check(AnswerTuples answers) { + if(isDisposed()) throw new DisposedException(); + Timer t = new Timer(); OWLOntology newOntology = addOntologyWithQueryPreciate(endomorphismChecker.getOntology(), m_record, answers); summary = new Summary(newOntology); initialiseSummarisedReasoner(); - if (summarisedConsistency) { + if(summarisedConsistency) { Set passed = new HashSet(), succ = new HashSet(); Set falsified = new HashSet(), fail = new HashSet(); int counter = 0; AnswerTuple representative; - for (AnswerTuple answer; answers.isValid(); answers.moveNext()) { + for(AnswerTuple answer; answers.isValid(); answers.moveNext()) { ++counter; answer = answers.getTuple(); representative = summary.getSummary(answer); @@ -140,13 +134,13 @@ public class HermitSummaryFilter implements Checker { else if(succ.contains(representative)) passed.add(answer); else 
if(summarisedHermiT.check(representative)) { - succ.add(representative); - passed.add(answer); - } - else { - fail.add(representative); + succ.add(representative); + passed.add(answer); + } + else { + fail.add(representative); falsified.add(answer); - } + } } answers.dispose(); @@ -177,16 +171,31 @@ public class HermitSummaryFilter implements Checker { @Override public boolean check(AnswerTuple answer) { - AnswerTuple representative = summary.getSummary(answer); - if (summarisedHermiT.isConsistent() && !summarisedHermiT.check(representative)) + if(isDisposed()) throw new DisposedException(); + + AnswerTuple representative = summary.getSummary(answer); + if(summarisedHermiT.isConsistent() && !summarisedHermiT.check(representative)) return false; - return endomorphismChecker.check(answer); + return endomorphismChecker.check(answer); } @Override public void dispose() { - if (summarisedHermiT != null) summarisedHermiT.dispose(); - endomorphismChecker.dispose(); + super.dispose(); + + if(summarisedHermiT != null) summarisedHermiT.dispose(); + endomorphismChecker.dispose(); + } + + private void initialiseSummarisedReasoner() { + Timer t = new Timer(); + summarisedHermiT = new HermitChecker(summary.getSummary(), summary.getSummary(m_record)); +// summary.save("summarised_query" + m_record.getQueryID() + ".owl"); + if(summarisedConsistency = summarisedHermiT.isConsistent()) + Utility.logDebug("The summary of ABox is consistent with the TBox."); + else + Utility.logDebug("The summary of ABox is NOT consistent with the TBox."); + m_record.addProcessingTime(Step.SUMMARISATION, t.duration()); } } diff --git a/src/uk/ac/ox/cs/pagoda/util/PagodaProperties.java b/src/uk/ac/ox/cs/pagoda/util/PagodaProperties.java index be6627a..7b7d48d 100644 --- a/src/uk/ac/ox/cs/pagoda/util/PagodaProperties.java +++ b/src/uk/ac/ox/cs/pagoda/util/PagodaProperties.java @@ -1,5 +1,7 @@ package uk.ac.ox.cs.pagoda.util; +import org.apache.log4j.Logger; + import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; @@ -20,6 +22,8 @@ public class PagodaProperties { in.close(); if(config.containsKey("debug")) { debug = Boolean.parseBoolean(config.getProperty("debug")); + Logger.getLogger("PagodaProperties") + .info("Debugging mode is enabled (you can disable it from file \"pagoda.properties\")"); } } catch(IOException e) { e.printStackTrace(); diff --git a/src/uk/ac/ox/cs/pagoda/util/Utility.java b/src/uk/ac/ox/cs/pagoda/util/Utility.java index e98cc81..cef4abd 100644 --- a/src/uk/ac/ox/cs/pagoda/util/Utility.java +++ b/src/uk/ac/ox/cs/pagoda/util/Utility.java @@ -29,7 +29,7 @@ public class Utility { private static StringBuilder logMessage = new StringBuilder(); static { - LOGS = Logger.getLogger("PAGOdA"); + LOGS = Logger.getLogger("Pagoda"); LOGS.setLevel(Level.DEBUG); } @@ -37,6 +37,10 @@ public class Utility { outs.push(System.out); } + static { + + } + public static String getGlobalTempDirAbsolutePath() { if(tempDir == null) { try { -- cgit v1.2.3
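The most pervasive change in this commit is that Checker stops being an interface and becomes an abstract class extending Disposable: every public entry point of Endomorph, HermitChecker, HermitSummaryFilter, BasicQueryEngine, ConsistencyManager and friends now begins with a guard that throws DisposedException once the object has been disposed, and every dispose() override first calls super.dispose(). The stand-alone sketch below illustrates that pattern only; Disposable and DisposedException here are simplified stand-ins for the classes in uk.ac.ox.cs.pagoda.util.disposable, not copies of them.

// Minimal sketch of the dispose-guard pattern introduced by this patch.
// Disposable and DisposedException are simplified assumptions, standing in
// for uk.ac.ox.cs.pagoda.util.disposable.*.

class DisposedException extends RuntimeException {
    DisposedException() { super("This object has already been disposed."); }
}

abstract class Disposable {
    private boolean disposed = false;

    public boolean isDisposed() { return disposed; }

    public void dispose() { disposed = true; }
}

// Checker is now an abstract class rather than an interface, so every
// implementation inherits the disposal flag.
abstract class Checker extends Disposable {
    public abstract boolean isConsistent();
}

class ExampleChecker extends Checker {

    @Override
    public boolean isConsistent() {
        // Each public method rejects calls on a disposed instance up front.
        if (isDisposed()) throw new DisposedException();
        return true;
    }

    @Override
    public void dispose() {
        super.dispose();   // mark as disposed first,
        // ... then release wrapped reasoners / stores here ...
    }

    public static void main(String[] args) {
        ExampleChecker checker = new ExampleChecker();
        System.out.println(checker.isConsistent());   // true
        checker.dispose();
        try {
            checker.isConsistent();                   // now fails fast
        } catch (DisposedException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}

Guarding every method, rather than only dispose(), presumably makes a disposed engine fail fast with a clear exception instead of surfacing a closed RDFox store or HermiT instance somewhere deeper in a query run.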
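A second theme is the limited-Skolemisation upper bound: LimitedSkolemisationApplication now accepts a configurable maximal depth, MultiStageQueryEngine gains a materialiseSkolemly overload taking a maxTermDepth argument, and LimitedSkolemisationApproximator keeps grounding existentials with Skolem terms only while the violating tuple stays within that depth, falling back to the constant approximation otherwise. The sketch below restates that cutoff in isolation; the TermDepths lookup and the string-valued "individuals" are assumptions made to keep it self-contained, whereas the real code works on HermiT Individuals through the SkolemTermsManager.

import java.util.List;

// Rough, self-contained restatement of the depth cutoff in
// LimitedSkolemisationApproximator; not the actual PAGOdA code.
class LimitedSkolemisationSketch {

    // Hypothetical stand-in for the SkolemTermsManager depth lookup.
    interface TermDepths {
        int depthOf(String individual);   // 0 for named individuals, n for nested Skolem terms
    }

    private final int maxTermDepth;
    private final TermDepths depths;

    LimitedSkolemisationSketch(int maxTermDepth, TermDepths depths) {
        this.maxTermDepth = maxTermDepth;
        this.depths = depths;
    }

    // Mirrors getMaxDepth(violationTuple): the depth of a tuple is the
    // maximum Skolem-term depth among its individuals.
    int maxDepth(List<String> violationTuple) {
        int max = 0;
        for (String individual : violationTuple)
            max = Math.max(max, depths.depthOf(individual));
        return max;
    }

    // Mirrors the branch in overApprox(...): Skolemise while the violating
    // tuple is shallow enough, otherwise approximate the existential by a
    // fresh constant (the ExistConstantApproximator route).
    String approximate(List<String> violationTuple) {
        if (maxDepth(violationTuple) > maxTermDepth)
            return "approximate the existential by a fresh constant";
        return "ground Skolemisation of the existential";
    }

    public static void main(String[] args) {
        java.util.Map<String, Integer> depthTable = new java.util.HashMap<>();
        depthTable.put("a", 0);        // named individual
        depthTable.put("f(a)", 1);     // Skolem term of depth 1
        depthTable.put("g(f(a))", 2);  // Skolem term of depth 2

        LimitedSkolemisationSketch approx = new LimitedSkolemisationSketch(1, depthTable::get);
        System.out.println(approx.approximate(List.of("a", "f(a)")));  // still Skolemises
        System.out.println(approx.approximate(List.of("g(f(a))")));    // falls back to a constant
    }
}

The depth of 1 used above matches the queryDependentMaxTermDepth that querySkolemisedRelevantSubset currently hard-codes (with a TODO to make it dynamic).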
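Finally, MyQueryReasoner replaces SatisfiabilityStatus with a cached ConsistencyStatus and inspects the multi-stage materialisation result before discarding the lazy upper store: -1 is treated as "the ontology is inconsistent" (and is now checked before the store is discarded), 1 as a usable upper bound, and anything else merely drops the lazy upper store. The snippet below is an informal restatement of that control flow, with the tag values read off the patch rather than any documented contract; it is not the reasoner code itself.

// Informal restatement of how MyQueryReasoner.preprocess() reacts to the
// tag returned by materialiseRestrictedly/materialiseSkolemly after this patch.
class MaterialisationTagSketch {

    enum ConsistencyStatus { CONSISTENT, INCONSISTENT, UNCHECKED }

    static ConsistencyStatus isConsistent = ConsistencyStatus.UNCHECKED;
    static boolean lazyUpperStoreUsable = true;

    // tag == -1 : the store became invalid on the first iteration, so the
    //             ontology is inconsistent and preprocessing gives up;
    // tag ==  1 : materialisation succeeded and the upper store is kept;
    // otherwise : the lazy upper store failed and is dropped, but the
    //             remaining bounds are still used.
    static boolean handleLazyUpperTag(int tag) {
        if (tag == -1) {
            isConsistent = ConsistencyStatus.INCONSISTENT;
            return false;
        }
        if (tag != 1) {
            lazyUpperStoreUsable = false;  // stands for lazyUpperStore.dispose(); lazyUpperStore = null;
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(handleLazyUpperTag(1) + " " + isConsistent + " " + lazyUpperStoreUsable);
        System.out.println(handleLazyUpperTag(0) + " " + isConsistent + " " + lazyUpperStoreUsable);
        System.out.println(handleLazyUpperTag(-1) + " " + isConsistent + " " + lazyUpperStoreUsable);
    }
}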