From 7a68441a541b12b22587fb53072193e1130049ff Mon Sep 17 00:00:00 2001 From: RncLsn Date: Tue, 19 May 2015 19:06:04 +0100 Subject: Deleted unused classes. --- .../cs/pagoda/multistage/FoldedApplication2.java | 51 ----- .../cs/pagoda/multistage/IndividualCollector.java | 69 ------ .../pagoda/multistage/RestrictedApplication2.java | 177 --------------- .../cs/pagoda/multistage/TwoStageApplication.java | 238 -------------------- .../cs/pagoda/multistage/TwoStageQueryEngine.java | 107 --------- .../ox/cs/pagoda/reasoner/ConsistencyManager2.java | 78 ------- .../ox/cs/pagoda/reasoner/ELHOQueryReasoner.java | 3 +- .../ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java | 2 +- .../ac/ox/cs/pagoda/reasoner/HermiTReasoner.java | 37 +-- .../ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java | 2 +- .../ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java | 15 +- .../ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java | 26 +-- .../LimitedSkolemisationApproximator.java | 9 +- .../rules/approximators/SkolemTermsManager.java | 77 +++---- .../tracking/TrackingRuleEncoderDisjVar1.java | 247 ++++++++++----------- 15 files changed, 196 insertions(+), 942 deletions(-) delete mode 100644 src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication2.java delete mode 100644 src/uk/ac/ox/cs/pagoda/multistage/IndividualCollector.java delete mode 100644 src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication2.java delete mode 100644 src/uk/ac/ox/cs/pagoda/multistage/TwoStageApplication.java delete mode 100644 src/uk/ac/ox/cs/pagoda/multistage/TwoStageQueryEngine.java delete mode 100644 src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java (limited to 'src/uk/ac') diff --git a/src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication2.java b/src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication2.java deleted file mode 100644 index 8212733..0000000 --- a/src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication2.java +++ /dev/null @@ -1,51 +0,0 @@ -package uk.ac.ox.cs.pagoda.multistage; - -import java.util.Collection; - -import org.semanticweb.HermiT.model.AtLeastConcept; -import org.semanticweb.HermiT.model.Atom; -import org.semanticweb.HermiT.model.DLClause; - -import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; -import uk.ac.ox.cs.pagoda.query.GapByStore4ID; -import uk.ac.ox.cs.pagoda.rules.DatalogProgram; -import uk.ac.ox.cs.pagoda.rules.Program; - -public class FoldedApplication2 extends TwoStageApplication { - - public FoldedApplication2(TwoStageQueryEngine engine, DatalogProgram program, GapByStore4ID gap) { - super(engine, program, gap); - } - - @Override - protected void addAuxiliaryRules() { - Collection overClauses; - DLClause disjunct; - Atom[] bodyAtoms; - int i; - for (DLClause constraint: constraints) - for (Atom headAtom: constraint.getHeadAtoms()) - if (headAtom.getDLPredicate() instanceof AtLeastConcept) { - disjunct = DLClause.create(new Atom[] {headAtom}, constraint.getBodyAtoms()); - overClauses = overExist.convert(disjunct, getOriginalClause(constraint)); - bodyAtoms = new Atom[constraint.getBodyLength() + 1]; - bodyAtoms[0] = getNAFAtom(headAtom); - i = 0; - for (Atom bodyAtom: constraint.getBodyAtoms()) - bodyAtoms[++i] = bodyAtom; - for (DLClause overClause: overClauses) - if (DLClauseHelper.hasSubsetBodyAtoms(disjunct, constraint)) - addDatalogRule(DLClause.create(new Atom[] {overClause.getHeadAtom(0)}, bodyAtoms)); - } - else - addDatalogRule(DLClause.create(new Atom[] {headAtom}, constraint.getBodyAtoms())); - } - - @Override - protected Collection getInitialClauses(Program program) { - return program.getClauses(); - } - - - -} diff 
--git a/src/uk/ac/ox/cs/pagoda/multistage/IndividualCollector.java b/src/uk/ac/ox/cs/pagoda/multistage/IndividualCollector.java deleted file mode 100644 index a9c127b..0000000 --- a/src/uk/ac/ox/cs/pagoda/multistage/IndividualCollector.java +++ /dev/null @@ -1,69 +0,0 @@ -package uk.ac.ox.cs.pagoda.multistage; - -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.Value; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.rio.RDFHandler; -import org.openrdf.rio.RDFHandlerException; -import uk.ac.ox.cs.JRDFox.model.Individual; -import uk.ac.ox.cs.pagoda.rules.approximators.SkolemTermsManager; -import uk.ac.ox.cs.pagoda.util.Namespace; - -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -public class IndividualCollector implements RDFHandler { - - boolean addedSkolemised = false; - Set individuals = new HashSet(); - - @Override - public void startRDF() throws RDFHandlerException { - // TODO Auto-generated method stub - - } - - @Override - public void endRDF() throws RDFHandlerException { - // TODO Auto-generated method stub - - } - - @Override - public void handleNamespace(String prefix, String uri) - throws RDFHandlerException { - // TODO Auto-generated method stub - - } - - @Override - public void handleStatement(Statement st) throws RDFHandlerException { - Resource sub = st.getSubject(); - if (sub instanceof URIImpl) - individuals.add(Individual.create(sub.toString())); - if (!st.getPredicate().toString().equals(Namespace.RDF_TYPE)) { - Value obj = st.getObject(); - if (obj instanceof URIImpl) - individuals.add(Individual.create(sub.toString())); - } - } - - @Override - public void handleComment(String comment) throws RDFHandlerException { - // TODO Auto-generated method stub - - } - - public Collection getAllIndividuals() { - if (!addedSkolemised) { - int number = SkolemTermsManager.getInstance().getNumberOfSkolemisedIndividual(); - for (int i = 0; i < number; ++i) - individuals.add(Individual.create(SkolemTermsManager.skolemisedIndividualPrefix + i)); - addedSkolemised = true; - } - return individuals; - } - -} diff --git a/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication2.java b/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication2.java deleted file mode 100644 index 66e8a17..0000000 --- a/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication2.java +++ /dev/null @@ -1,177 +0,0 @@ -package uk.ac.ox.cs.pagoda.multistage; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Comparator; -import java.util.LinkedList; - -import org.semanticweb.HermiT.model.AtLeastConcept; -import org.semanticweb.HermiT.model.Atom; -import org.semanticweb.HermiT.model.AtomicConcept; -import org.semanticweb.HermiT.model.DLClause; -import org.semanticweb.HermiT.model.DLPredicate; - -import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; -import uk.ac.ox.cs.pagoda.multistage.treatement.SimpleComparator; -import uk.ac.ox.cs.pagoda.query.GapByStore4ID; -import uk.ac.ox.cs.pagoda.rules.DatalogProgram; -import uk.ac.ox.cs.pagoda.rules.Program; - -public class RestrictedApplication2 extends TwoStageApplication { - - private Normalisation norm; - private boolean hasDisjunctions; - private Comparator disjunctComparator; - - public RestrictedApplication2(TwoStageQueryEngine engine, DatalogProgram program, GapByStore4ID gap) { - super(engine, program, gap); - if (hasDisjunctions) { - addNegativeDatalogRules(); - disjunctComparator = new SimpleComparator(); - } - } - - private void addNegativeDatalogRules() { 
- Collection allRules = new LinkedList(rules); - allRules.addAll(constraints); - for (DLClause clause: allRules) { - addAddtionalDatalogRules(clause); - } - allRules.clear(); - } - - private void addAddtionalDatalogRules(DLClause clause) { - Atom[] headAtoms = clause.getHeadAtoms(); - Atom[] bodyAtoms = clause.getBodyAtoms(); - int headLength = headAtoms.length; - int bodyLength = bodyAtoms.length; - DLClause tClause; - if (m_bottom.isBottomRule(clause)) { - if (clause.getBodyLength() == 1) return ; - for (int i = 0; i < bodyLength; ++i) - if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) { - Atom[] newBodyAtoms = new Atom[bodyLength - 1]; - for (int j = 0; j < bodyLength - 1; ++j) - newBodyAtoms[j] = j < i ? bodyAtoms[j] : bodyAtoms[j + 1]; - - Atom negativeAtom = MultiStageUpperProgram.getNegativeAtom(bodyAtoms[i]); - tClause = DLClause.create(new Atom[] { negativeAtom }, newBodyAtoms); - addDatalogRule(tClause); - } - } - else if (headLength > 1) { - for (int i = 0; i < headLength; ++i) { - DLPredicate p = headAtoms[i].getDLPredicate(); - if (!(p instanceof AtomicConcept)) { - return ; - } - } - - for (int i = 0; i < headLength; ++i) { - Atom[] newBodyAtoms = new Atom[headLength + bodyLength - 1]; - for (int j = 0; j < headLength + bodyLength - 1; ++j) - newBodyAtoms[j] = j < bodyLength ? bodyAtoms[j] : - j < bodyLength + i ? MultiStageUpperProgram.getNegativeAtom(headAtoms[j - bodyLength]) : - MultiStageUpperProgram.getNegativeAtom(headAtoms[j - bodyLength + 1]); - - tClause = DLClause.create(new Atom[] { headAtoms[i] }, newBodyAtoms); - addDatalogRule(tClause); - } - } - else if (headLength == 1) { - DLPredicate p = clause.getHeadAtom(0).getDLPredicate(); - if (p instanceof AtomicConcept) { - Atom negativeHeadAtom = MultiStageUpperProgram.getNegativeAtom(clause.getHeadAtom(0)); - for (int i = 0; i < bodyLength; ++i) - if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) { - Atom[] newBodyAtoms = new Atom[clause.getBodyLength()]; - newBodyAtoms[0] = negativeHeadAtom; - for (int j = 1; j < bodyLength; ++j) - newBodyAtoms[j] = j <= i ? 
bodyAtoms[j - 1] : bodyAtoms[j]; - - tClause = DLClause.create(new Atom[] {MultiStageUpperProgram.getNegativeAtom(bodyAtoms[i])}, newBodyAtoms); - addDatalogRule(tClause); - } - } - else if (p instanceof AtLeastConcept && clause.getBodyLength() == 1 && clause.getBodyAtom(0).getDLPredicate() instanceof AtomicConcept) { - AtLeastConcept alc = (AtLeastConcept) p; - AtomicConcept ac = norm.getLeftAuxiliaryConcept(alc, true); - if (ac != null) { - Atom bodyAtom = clause.getBodyAtom(0); - addDatalogRule(DLClause.create(new Atom[] {MultiStageUpperProgram.getNegativeAtom(bodyAtom)}, - new Atom[] {MultiStageUpperProgram.getNegativeAtom(Atom.create(ac, bodyAtom.getArgument(0)))} )); - } - } - } - } - - @Override - protected void addAuxiliaryRules() { - for (DLClause constraint: constraints) - if (constraint.getHeadLength() <= 1) - processExistentialRule(constraint); - else - processDisjunctiveRule(constraint); - } - - private static final Atom[] empty = new Atom[0]; - - private void processDisjunctiveRule(DLClause constraint) { - int headLength = constraint.getHeadLength(); - Atom[] orderedAtoms = new Atom[headLength]; - for (int i = 0; i < headLength; ++i) - orderedAtoms[i] = constraint.getHeadAtom(i); - - Arrays.sort(orderedAtoms, disjunctComparator); - - Collection bodyAtoms = new LinkedList(); - for (int i = 0; i < headLength; ++i) { - bodyAtoms.add(getNAFAtom(orderedAtoms[i])); - } - for (Atom atom: constraint.getBodyAtoms()) - bodyAtoms.add(atom); - - Atom negAtom; - for (Atom atom: constraint.getHeadAtoms()) { - negAtom = MultiStageUpperProgram.getNegativeAtom(atom); - bodyAtoms.add(getNAFAtom(negAtom)); - addDatalogRule(DLClause.create(new Atom[] {atom}, bodyAtoms.toArray(empty))); - } - } - - private void processExistentialRule(DLClause constraint) { - Atom[] bodyAtoms = new Atom[constraint.getBodyLength() + 1]; - bodyAtoms[0] = getNAFAtom(constraint.getHeadAtom(0)); - int i = 0; - for (Atom atom: constraint.getBodyAtoms()) - bodyAtoms[++i] = atom; - - Collection overClauses = overExist.convert(constraint, getOriginalClause(constraint)); - for (DLClause clause: overClauses) - if (DLClauseHelper.hasSubsetBodyAtoms(clause, constraint)) - addDatalogRule(DLClause.create(new Atom[] {clause.getHeadAtom(0)}, bodyAtoms)); - } - - @Override - protected Collection getInitialClauses(Program program) { - Collection clauses = program.getClauses(); - hasDisjunctions = false; - for (DLClause clause: clauses) - if (clause.getHeadLength() > 1) { - hasDisjunctions = true; - break; - } - - if (hasDisjunctions) { - norm = new Normalisation(clauses, program.getOntology(), m_bottom); - norm.process(); - clauses = norm.m_normClauses; - } - return clauses; - } - - protected DLClause getOriginalClause(DLClause clause) { - DLClause original = super.getOriginalClause(clause); - return norm.getOriginalClause(original); - } -} diff --git a/src/uk/ac/ox/cs/pagoda/multistage/TwoStageApplication.java b/src/uk/ac/ox/cs/pagoda/multistage/TwoStageApplication.java deleted file mode 100644 index 79627d9..0000000 --- a/src/uk/ac/ox/cs/pagoda/multistage/TwoStageApplication.java +++ /dev/null @@ -1,238 +0,0 @@ -package uk.ac.ox.cs.pagoda.multistage; - -import org.semanticweb.HermiT.model.*; -import uk.ac.ox.cs.JRDFox.JRDFStoreException; -import uk.ac.ox.cs.JRDFox.store.TupleIterator; -import uk.ac.ox.cs.pagoda.MyPrefixes; -import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; -import uk.ac.ox.cs.pagoda.hermit.RuleHelper; -import uk.ac.ox.cs.pagoda.query.GapByStore4ID; -import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager; 
-import uk.ac.ox.cs.pagoda.rules.DatalogProgram; -import uk.ac.ox.cs.pagoda.rules.Program; -import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist; -import uk.ac.ox.cs.pagoda.util.Namespace; -import uk.ac.ox.cs.pagoda.util.SparqlHelper; -import uk.ac.ox.cs.pagoda.util.Utility; - -import java.util.*; - -abstract class TwoStageApplication { - - private static final String NAF_suffix = "_NAF"; - protected TwoStageQueryEngine engine; - protected MyPrefixes prefixes = MyPrefixes.PAGOdAPrefixes; - protected Set rules = new HashSet(); - protected Collection constraints = new LinkedList(); - protected BottomStrategy m_bottom; - protected Set toGenerateNAFFacts = new HashSet(); - protected OverApproxExist overExist = new OverApproxExist(); - Program lowerProgram; - boolean m_incrementally = true; - Set allIndividuals = new HashSet(); - RDFoxTripleManager tripleManager; - private GapByStore4ID gap; - private StringBuilder datalogRuleText = new StringBuilder(); - private Map map = new HashMap(); - - public TwoStageApplication(TwoStageQueryEngine engine, DatalogProgram program, GapByStore4ID gap) { - this.engine = engine; - tripleManager = new RDFoxTripleManager(engine.getDataStore(), m_incrementally); - this.gap = gap; - m_bottom = program.getUpperBottomStrategy(); - lowerProgram = program.getLower(); - - Variable X = Variable.create("X"); - Collection clauses = getInitialClauses(program.getGeneral()); - Collection introducedConstraints = new LinkedList(); - LinkedList newHeadAtoms = new LinkedList(); - for (DLClause clause : m_bottom.process(clauses)) { - if (m_bottom.isBottomRule(clause) - || clause.getHeadLength() == 1 - && !(clause.getHeadAtom(0).getDLPredicate() instanceof AtLeast)) - addDatalogRule(clause); - else { - newHeadAtoms.clear(); - boolean changed = false; - for (Atom atom : clause.getHeadAtoms()) { - if (atom.getDLPredicate() instanceof AtLeastConcept) { - AtLeastConcept atLeast = (AtLeastConcept) atom - .getDLPredicate(); - if (atLeast.getToConcept() instanceof AtomicNegationConcept) { - AtomicConcept positive = ((AtomicNegationConcept) atLeast - .getToConcept()).getNegatedAtomicConcept(); - AtomicConcept negative = OverApproxExist - .getNegationConcept(positive); - Atom atom1 = Atom.create(positive, X); - Atom atom2 = Atom.create(negative, X); - introducedConstraints.add(DLClause.create( - new Atom[0], new Atom[] { atom1, atom2 })); - newHeadAtoms.add(Atom.create(AtLeastConcept.create( - atLeast.getArity(), atLeast.getOnRole(), - negative), atom.getArgument(0))); - changed = true; - continue; - } - } else if (atom.getDLPredicate() instanceof AtLeastDataRange) - changed = true; - else - newHeadAtoms.add(atom); - - } - if (!changed) - constraints.add(clause); - else if (!newHeadAtoms.isEmpty()) { - DLClause newClause = DLClause.create( - newHeadAtoms.toArray(new Atom[0]), - clause.getBodyAtoms()); - map.put(newClause, clause); - constraints.add(newClause); - } - } - } - - for (DLClause clause : m_bottom.process(introducedConstraints)) - addDatalogRule(clause); - - } - - int materialise() { - StringBuilder builder = new StringBuilder(getDatalogRuleText()); - for (DLClause clause: lowerProgram.getClauses()) - if (!rules.contains(clause)) - builder.append(RuleHelper.getText(clause)); - - engine.materialise(builder.toString(), null, false); - addAuxiliaryRules(); - addAuxiliaryNAFFacts(); - engine.materialise(getDatalogRuleText(), gap, m_incrementally); - return engine.isValid() ? 
1 : 0; - } - - void checkNAFFacts() { - int counter = 0; - TupleIterator tuples = null; - for (Atom atom : toGenerateNAFFacts) { - try { - counter = 0; - atom = getNAFAtom(atom); - tuples = engine.internal_evaluate(SparqlHelper.getSPARQLQuery( - new Atom[] { atom }, atom.getArgumentVariable(0) - .getName())); - for (long multi = tuples.open(); multi != 0; multi = tuples.getNext()) { - ++counter; - } - Utility.logDebug(atom + " " + counter); - } catch (JRDFStoreException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } finally { - if (tuples != null) tuples.dispose(); - tuples = null; - } - } - } - - protected void addDatalogRule(DLClause clause) { - if (clause.getBodyAtom(0).equals(clause.getHeadAtom(0))) - return; - rules.add(clause); - datalogRuleText.append(RuleHelper.getText(clause)).append('\n'); - } - - public String getDatalogRuleText() { - StringBuilder program = new StringBuilder(); - program.append(prefixes.prefixesText()); - program.append(datalogRuleText.toString()); - return program.toString(); - } - - protected abstract void addAuxiliaryRules(); - - private void addAuxiliaryNAFFacts() { - - for (int id : tripleManager.getResourceIDs(engine.getAllIndividuals())) - allIndividuals.add(id); - - DLPredicate naf; - DLPredicate p; - for (Atom atom: toGenerateNAFFacts) { - naf = getNAFAtom(atom, false).getDLPredicate(); - p = atom.getDLPredicate(); - - int typeID = tripleManager.getResourceID(Namespace.RDF_TYPE); - int conceptID = tripleManager.getResourceID(((AtomicConcept) naf) - .getIRI()); - for (int answer : generateNAFFacts(p)) { - tripleManager.addTripleByID(new int[] { answer, typeID, - conceptID }); - } - } - } - - private Collection generateNAFFacts(DLPredicate p) { - Variable X = Variable.create("X"); - TupleIterator tuples = null; - Set ret = new HashSet(allIndividuals); - try { - tuples = engine.internal_evaluate(SparqlHelper.getSPARQLQuery( - new Atom[] { Atom.create(p, X) }, "X")); - for (long multi = tuples.open(); multi != 0; multi = tuples.getNext()) { - ret.remove(tuples.getResourceID(0)); - } - } catch (JRDFStoreException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } finally { - if (tuples != null) tuples.dispose(); - } - return ret; - } - - protected abstract Collection getInitialClauses(Program program); - - protected Atom getNAFAtom(Atom atom) { - return getNAFAtom(atom, true); - } - - private Atom getNAFAtom(Atom atom, boolean update) { - DLPredicate p = atom.getDLPredicate(); - if (update) { - toGenerateNAFFacts.add(atom); - } - if (p instanceof AtomicConcept) { - AtomicConcept nc = AtomicConcept.create(((AtomicConcept) p) - .getIRI() + "_NAF"); - return Atom.create(nc, atom.getArgument(0)); - } - if (p instanceof Equality || p instanceof AnnotatedEquality) - return Atom.create( - AtomicRole.create(Namespace.EQUALITY + NAF_suffix), - atom.getArgument(0), atom.getArgument(1)); - if (p instanceof Inequality) - atom = Atom.create( - AtomicRole.create(Namespace.INEQUALITY + NAF_suffix), - atom.getArgument(0), atom.getArgument(1)); - // if (p instanceof AtomicRole) { - // AtomicRole nr = AtomicRole.create(((AtomicRole) p).getIRI() + - // NAF_suffix); - // return Atom.create(nr, atom.getArgument(0), atom.getArgument(1)); - // } - if (p instanceof AtLeastConcept) { - AtomicConcept nc = AtomicConcept.create(Normalisation - .getAuxiliaryConcept4Disjunct((AtLeastConcept) p) - + NAF_suffix); - return Atom.create(nc, atom.getArgument(0)); - } - Utility.logError("Unknown DLPredicate in an atom: " + atom); - return null; - } - - 
protected DLClause getOriginalClause(DLClause clause) { - DLClause original = map.get(clause); - if (original == null) - return clause; - return original; - } - -} \ No newline at end of file diff --git a/src/uk/ac/ox/cs/pagoda/multistage/TwoStageQueryEngine.java b/src/uk/ac/ox/cs/pagoda/multistage/TwoStageQueryEngine.java deleted file mode 100644 index 29cf23a..0000000 --- a/src/uk/ac/ox/cs/pagoda/multistage/TwoStageQueryEngine.java +++ /dev/null @@ -1,107 +0,0 @@ -package uk.ac.ox.cs.pagoda.multistage; - -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; -import org.openrdf.rio.turtle.TurtleParser; -import uk.ac.ox.cs.JRDFox.JRDFStoreException; -import uk.ac.ox.cs.JRDFox.model.Individual; -import uk.ac.ox.cs.pagoda.query.GapByStore4ID; -import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; -import uk.ac.ox.cs.pagoda.rules.DatalogProgram; -import uk.ac.ox.cs.pagoda.util.Timer; -import uk.ac.ox.cs.pagoda.util.Utility; - -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.util.Collection; - -public class TwoStageQueryEngine extends StageQueryEngine { - - IndividualCollector m_collector = new IndividualCollector(); - - public TwoStageQueryEngine(String name, boolean checkValidity) { - super(name, checkValidity); - } - - @Override - public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) { - TwoStageApplication program = new FoldedApplication2(this, dProgram, gap); - program.materialise(); - } - - @Override - public void importRDFData(String fileName, String importedFile) { - super.importRDFData(fileName, importedFile); - TurtleParser parser = new TurtleParser(); - parser.setRDFHandler(m_collector); - for (String file: importedFile.split(QueryReasoner.ImportDataFileSeparator)) { - FileInputStream inputStream; - try { - inputStream = new FileInputStream(file); - parser.parse(inputStream, ""); - inputStream.close(); - } catch (FileNotFoundException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (RDFParseException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (RDFHandlerException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - } - - @Override - public int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap) { - TwoStageApplication program = new RestrictedApplication2(this, dProgram, gap); - return program.materialise(); - } - - @Override - public int materialiseSkolemly(DatalogProgram dProgram, GapByStore4ID gap) { - throw new UnsupportedOperationException("This method is not available in " + getClass()); - } - - public void materialise(String programText, GapByStore4ID gap, boolean incrementally) { - try { - if (gap != null) { - try { - gap.compile(incrementally ? 
null : programText); - gap.addBackTo(); - } finally { - gap.clear(); - } - } else { - long oldTripleCount = store.getTriplesCount(); - Timer t = new Timer(); - - if (!incrementally) -// store.addRules(new String[] {programText}); - store.importRules(programText); - store.applyReasoning(incrementally); - - long tripleCount = store.getTriplesCount(); - - Utility.logDebug("current store after materialising upper related rules: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); - Utility.logDebug("current store finished the materialisation of upper related rules in " + t.duration() + " seconds."); - } - store.clearRulesAndMakeFactsExplicit(); - } catch (JRDFStoreException e) { - e.printStackTrace(); - } - - } - - public Collection getAllIndividuals() { - return m_collector.getAllIndividuals(); - } - -} - - diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java deleted file mode 100644 index 9191067..0000000 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java +++ /dev/null @@ -1,78 +0,0 @@ -package uk.ac.ox.cs.pagoda.reasoner; - -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyManager; -import uk.ac.ox.cs.pagoda.query.AnswerTuples; -import uk.ac.ox.cs.pagoda.query.QueryRecord; -import uk.ac.ox.cs.pagoda.reasoner.full.Checker; -import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter; -import uk.ac.ox.cs.pagoda.tracking.QueryTracker; -import uk.ac.ox.cs.pagoda.util.Utility; - -@Deprecated -public class ConsistencyManager2 extends ConsistencyManager { - - public ConsistencyManager2(MyQueryReasoner reasoner) { - super(reasoner); - fragmentExtracted = true; - } - - protected boolean unsatisfiability(double duration) { - Utility.logDebug("The ontology and dataset is unsatisfiable."); - return false; - } - - protected boolean satisfiability(double duration) { - Utility.logDebug("The ontology and dataset is satisfiable."); - return true; - } - - @Override - boolean check() { -// if (!checkRLLowerBound()) return false; -// if (!checkELLowerBound()) return false; - // TODO test - if (checkUpper(m_reasoner.lazyUpperStore) && checkUpper(m_reasoner.limitedSkolemUpperStore)) return true; - AnswerTuples iter = null; - - try { - iter = m_reasoner.trackingStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()); - fullQueryRecord.updateUpperBoundAnswers(iter); - } finally { - if (iter != null) iter.dispose(); - } - - if (fullQueryRecord.getNoOfCompleteAnswers() == 0) - return satisfiability(t.duration()); - - try { - extractAxioms(); - } catch (OWLOntologyCreationException e) { - e.printStackTrace(); - } - - Checker checker = new HermitSummaryFilter(fullQueryRecord, true); // m_reasoner.factory.getSummarisedReasoner(fullQueryRecord); -// fullQueryRecord.saveRelevantOntology("fragment_bottom.owl"); - boolean satisfiable = checker.isConsistent(); - checker.dispose(); - if (!satisfiable) return unsatisfiability(t.duration()); - - return satisfiability(t.duration()); - } - - private void extractAxioms() throws OWLOntologyCreationException { - OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager(); - fullQueryRecord.setRelevantOntology(manager.createOntology()); - QueryTracker tracker = new QueryTracker(m_reasoner.encoder, m_reasoner.rlLowerStore, fullQueryRecord); - m_reasoner.encoder.setCurrentQuery(fullQueryRecord); - tracker.extract(m_reasoner.trackingStore, null, true); - } - - 
@Override - public QueryRecord[] getQueryRecords() { - if (botQueryRecords == null) - botQueryRecords = new QueryRecord[] {fullQueryRecord}; - return botQueryRecords; - } - -} diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java index ab57ccf..2285b5f 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java @@ -2,7 +2,6 @@ package uk.ac.ox.cs.pagoda.reasoner; import org.semanticweb.karma2.profile.ELHOProfile; import org.semanticweb.owlapi.model.OWLOntology; - import uk.ac.ox.cs.pagoda.constraints.UnaryBottom; import uk.ac.ox.cs.pagoda.query.AnswerTuples; import uk.ac.ox.cs.pagoda.query.QueryRecord; @@ -12,7 +11,7 @@ import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; -public class ELHOQueryReasoner extends QueryReasoner { +class ELHOQueryReasoner extends QueryReasoner { LowerDatalogProgram program; diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java index 6da86ac..a4fa7c3 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java @@ -14,7 +14,7 @@ import uk.ac.ox.cs.pagoda.rules.DatalogProgram; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; -public class ELHOUQueryReasoner extends QueryReasoner { +class ELHOUQueryReasoner extends QueryReasoner { DatalogProgram program; diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java index 5511691..d1856c9 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java @@ -1,32 +1,22 @@ package uk.ac.ox.cs.pagoda.reasoner; -import java.io.File; -import java.io.IOException; -import java.util.HashSet; -import java.util.Set; - import org.semanticweb.HermiT.Reasoner; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.owlapi.model.OWLOntologyCreationException; -import org.semanticweb.owlapi.model.OWLOntologyStorageException; - +import org.semanticweb.owlapi.model.*; import uk.ac.ox.cs.JRDFox.model.Individual; import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; import uk.ac.ox.cs.pagoda.owl.OWLHelper; import uk.ac.ox.cs.pagoda.owl.QueryRoller; -import uk.ac.ox.cs.pagoda.query.AnswerTuple; -import uk.ac.ox.cs.pagoda.query.AnswerTuples; -import uk.ac.ox.cs.pagoda.query.AnswerTuplesImp; -import uk.ac.ox.cs.pagoda.query.GapByStore4ID; -import uk.ac.ox.cs.pagoda.query.QueryRecord; +import uk.ac.ox.cs.pagoda.query.*; import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; import uk.ac.ox.cs.pagoda.rules.DatalogProgram; import uk.ac.ox.cs.pagoda.util.Utility; -public class HermiTReasoner extends QueryReasoner { +import java.io.File; +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + +class HermiTReasoner extends QueryReasoner { Reasoner hermit; @@ -54,15 +44,8 @@ public class HermiTReasoner extends QueryReasoner { OWLOntology tbox = onto; try { onto = OWLHelper.getImportedOntology(tbox, importedData.toString().split(ImportDataFileSeparator)); - importedOntologyPath = OWLHelper.getOntologyPath(onto); - } catch 
(OWLOntologyCreationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (OWLOntologyStorageException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block + importedOntologyPath = OWLHelper.getOntologyPath(onto); + } catch(OWLOntologyCreationException | OWLOntologyStorageException | IOException e) { e.printStackTrace(); } diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java index b10e2d1..6adbd37 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java @@ -25,7 +25,7 @@ import uk.ac.ox.cs.pagoda.util.tuples.Tuple; import java.util.Collection; import java.util.HashMap; -public class MyQueryReasoner extends QueryReasoner { +class MyQueryReasoner extends QueryReasoner { OWLOntology ontology; DatalogProgram program; diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java index 3894874..60f34c6 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java @@ -10,18 +10,17 @@ import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine; import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; import uk.ac.ox.cs.pagoda.util.Timer; -public class RLQueryReasoner extends QueryReasoner { +class RLQueryReasoner extends QueryReasoner { RDFoxQueryEngine rlLowerStore = null; - LowerDatalogProgram program; + LowerDatalogProgram program; + Timer t = new Timer(); public RLQueryReasoner() { - rlLowerStore = new BasicQueryEngine("rl"); + rlLowerStore = new BasicQueryEngine("rl"); } - Timer t = new Timer(); - @Override public void evaluate(QueryRecord queryRecord) { AnswerTuples rlAnswer = null; @@ -56,10 +55,8 @@ public class RLQueryReasoner extends QueryReasoner { public boolean preprocess() { rlLowerStore.importRDFData("data", importedData.toString()); rlLowerStore.materialise("lower program", program.toString()); - - if (!isConsistent()) - return false; - return true; + + return isConsistent(); } @Override diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java index fe4022d..e5564d9 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java @@ -1,7 +1,6 @@ package uk.ac.ox.cs.pagoda.reasoner; import org.semanticweb.owlapi.model.OWLOntology; - import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator; import uk.ac.ox.cs.pagoda.query.AnswerTuples; @@ -12,26 +11,25 @@ import uk.ac.ox.cs.pagoda.rules.DatalogProgram; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; -public class RLUQueryReasoner extends QueryReasoner { +class RLUQueryReasoner extends QueryReasoner { DatalogProgram program; BasicQueryEngine rlLowerStore, rlUpperStore; - boolean multiStageTag, equalityTag; + boolean multiStageTag, equalityTag; + Timer t = new Timer(); public RLUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { this.multiStageTag = multiStageTag; - this.equalityTag = considerEqualities; - rlLowerStore = new BasicQueryEngine("rl-lower-bound"); - if (!multiStageTag) - rlUpperStore = new BasicQueryEngine("rl-upper-bound"); - else - rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); + this.equalityTag = considerEqualities; + 
rlLowerStore = new BasicQueryEngine("rl-lower-bound"); + if(!multiStageTag) + rlUpperStore = new BasicQueryEngine("rl-upper-bound"); + else + rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); } - Timer t = new Timer(); - @Override public void evaluate(QueryRecord queryRecord) { AnswerTuples ans = null; @@ -100,11 +98,9 @@ public class RLUQueryReasoner extends QueryReasoner { rlUpperStore.importRDFData("data", datafile); rlUpperStore.materialiseRestrictedly(program, null); - - if (!isConsistent()) - return false; - return true; + return isConsistent(); + } @Override diff --git a/src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java b/src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java index 20ae53b..3f1ed7e 100644 --- a/src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java +++ b/src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java @@ -1,13 +1,14 @@ package uk.ac.ox.cs.pagoda.rules.approximators; import org.semanticweb.HermiT.model.*; -import uk.ac.ox.cs.pagoda.multistage.AnswerTupleID; import uk.ac.ox.cs.pagoda.multistage.MultiStageUpperProgram; import uk.ac.ox.cs.pagoda.rules.ExistConstantApproximator; import uk.ac.ox.cs.pagoda.util.tuples.Tuple; import uk.ac.ox.cs.pagoda.util.tuples.TupleBuilder; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; /** * Approximates existential rules through a limited form of Skolemisation. @@ -24,7 +25,6 @@ public class LimitedSkolemisationApproximator implements TupleDependentApproxima private final int maxTermDepth; private final TupleDependentApproximator alternativeApproximator; private final SkolemTermsManager skolemTermsManager; - private Map mapIndividualsToDepth; public LimitedSkolemisationApproximator(int maxTermDepth) { this(maxTermDepth, new ExistConstantApproximator()); @@ -33,7 +33,6 @@ public class LimitedSkolemisationApproximator implements TupleDependentApproxima public LimitedSkolemisationApproximator(int maxTermDepth, TupleDependentApproximator alternativeApproximator) { this.maxTermDepth = maxTermDepth; this.alternativeApproximator = alternativeApproximator; - this.mapIndividualsToDepth = new HashMap<>(); this.skolemTermsManager = SkolemTermsManager.getInstance(); } @@ -144,7 +143,7 @@ public class LimitedSkolemisationApproximator implements TupleDependentApproxima public int getMaxDepth(Tuple violationTuple) { int maxDepth = 0; for (Individual individual : violationTuple) - maxDepth = Integer.max(maxDepth, skolemTermsManager.getDepth(individual)); + maxDepth = Integer.max(maxDepth, skolemTermsManager.getDepthOf(individual)); return maxDepth; } diff --git a/src/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java b/src/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java index 0c12a27..368c014 100644 --- a/src/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java +++ b/src/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java @@ -12,38 +12,56 @@ import java.util.Map; */ public class SkolemTermsManager { - public static final String skolemisedIndividualPrefix = Namespace.PAGODA_ANONY + "individual"; + public static final String SKOLEMISED_INDIVIDUAL_PREFIX = Namespace.PAGODA_ANONY + "individual"; private static SkolemTermsManager skolemTermsManager; - private int individualCounter = 0; - private Map termNumber = new HashMap<>(); - private Map mapIndividualToDepth = new HashMap<>(); + private int termsCounter = 0; + 
private Map mapClauseToId = new HashMap<>(); + private Map mapTermToDepth = new HashMap<>(); private int dependenciesCounter = 0; - // replace with hashcode. in case of collision you get only a different upper bound model. - // or, better, use perfect hashing (i.e. devise an ad-hoc hash function without collisions) + // TODO replace with hashcode. in case of collision you get only a different upper bound model. + // TODO you can use a cash. private Map, Integer> mapDependencyToId = new HashMap<>(); + private SkolemTermsManager() { + } + + public static int indexOfSkolemisedIndividual(Atom atom) { + Term t; + for(int index = 0; index < atom.getArity(); ++index) { + t = atom.getArgument(index); + if(t instanceof Individual && ((Individual) t).getIRI().contains(SKOLEMISED_INDIVIDUAL_PREFIX)) + return index; + } + return -1; + } + + public static SkolemTermsManager getInstance() { + if(skolemTermsManager == null) skolemTermsManager = new SkolemTermsManager(); + return skolemTermsManager; + } + /** * Get a fresh Individual, unique for the clause, the offset and the dependency. * */ public Individual getFreshIndividual(DLClause originalClause, int offset, Tuple dependency) { - if (!termNumber.containsKey(originalClause)) { - termNumber.put(originalClause, individualCounter); - individualCounter += noOfExistential(originalClause); + if(!mapClauseToId.containsKey(originalClause)) { + mapClauseToId.put(originalClause, termsCounter); + termsCounter += noOfExistential(originalClause); } if (!mapDependencyToId.containsKey(dependency)) { mapDependencyToId.put(dependency, dependenciesCounter++); } - String termId = termNumber.get(originalClause) + offset + "_" + mapDependencyToId(dependency); - Individual newIndividual = Individual.create(skolemisedIndividualPrefix + termId); + String termId = mapClauseToId.get(originalClause) + offset + "_" + mapDependencyToId(dependency); + Individual newIndividual = Individual.create(SKOLEMISED_INDIVIDUAL_PREFIX + termId); int depth = 0; for (Individual individual : dependency) depth = Integer.max(depth, mapIndividualToDepth(individual)); - mapIndividualToDepth.put(newIndividual, depth); + mapTermToDepth.put(newIndividual, depth); return newIndividual; } @@ -52,14 +70,14 @@ public class SkolemTermsManager { * Get a fresh Individual, unique for the clause and the offset. * */ public Individual getFreshIndividual(DLClause originalClause, int offset) { - if (!termNumber.containsKey(originalClause)) { - termNumber.put(originalClause, individualCounter); - individualCounter += noOfExistential(originalClause); + if(!mapClauseToId.containsKey(originalClause)) { + mapClauseToId.put(originalClause, termsCounter); + termsCounter += noOfExistential(originalClause); } - String termId = "" + termNumber.get(originalClause) + offset; - Individual newIndividual = Individual.create(skolemisedIndividualPrefix + termId); - mapIndividualToDepth.put(newIndividual, 0); + String termId = "" + mapClauseToId.get(originalClause) + offset; + Individual newIndividual = Individual.create(SKOLEMISED_INDIVIDUAL_PREFIX + termId); + mapTermToDepth.put(newIndividual, 0); return newIndividual; } @@ -69,7 +87,7 @@ public class SkolemTermsManager { *

* The term must have been generated by this manager. * */ - public int getDepth(Individual individual) { + public int getDepthOf(Individual individual) { return mapIndividualToDepth(individual); } @@ -77,24 +95,7 @@ public class SkolemTermsManager { * Get the number of individuals generated by this manager. * */ public int getNumberOfSkolemisedIndividual() { - return mapIndividualToDepth.keySet().size(); - } - - public static int indexOfSkolemisedIndividual(Atom atom) { - Term t; - for (int index = 0; index < atom.getArity(); ++index) { - t = atom.getArgument(index); - if (t instanceof Individual && ((Individual) t).getIRI().contains(skolemisedIndividualPrefix)) return index; - } - return -1; - } - - private SkolemTermsManager() { - } - - public static SkolemTermsManager getInstance() { - if (skolemTermsManager == null) skolemTermsManager = new SkolemTermsManager(); - return skolemTermsManager; + return mapTermToDepth.keySet().size(); } private int mapDependencyToId(Tuple dependency) { @@ -103,7 +104,7 @@ public class SkolemTermsManager { } private int mapIndividualToDepth(Individual dependency) { - if (mapIndividualToDepth.containsKey(dependency)) return mapIndividualToDepth.get(dependency); + if(mapTermToDepth.containsKey(dependency)) return mapTermToDepth.get(dependency); else return 0; } diff --git a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar1.java b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar1.java index 2143b03..d96c747 100644 --- a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar1.java +++ b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar1.java @@ -17,95 +17,98 @@ import java.util.Set; public class TrackingRuleEncoderDisjVar1 extends TrackingRuleEncoderWithGap { + private Set disjunctiveRules = new HashSet(); + private Variable X = Variable.create("X"), Y = Variable.create("Y"); + private String bottomTrackingProgram = null; + public TrackingRuleEncoderDisjVar1(UpperDatalogProgram program, BasicQueryEngine store) { super(program, store); } - private Set disjunctiveRules = new HashSet(); - @Override public boolean encodingRules() { if (super.encodingRules()) { processDisjunctiveRules(); - return true; + return true; } - return false; + return false; } @Override protected void encodingRule(DLClause clause) { if (currentQuery.isBottom()) { // super.encodingRule(clause); - encodingBottomQueryClause(clause); - return ; + encodingBottomQueryClause(clause); + return; } - + DLClause original = program.getCorrespondingClause(clause); if (original.getHeadLength() <= 1) { super.encodingRule(clause); - } + } else { if (!DLClauseHelper.hasSubsetBodyAtoms(clause, original)) super.encodingRule(clause); addDisjunctiveRule(original); } - + } - private void processDisjunctiveRules() { for (DLClause clause: disjunctiveRules) encodingDisjunctiveRule(clause); } - + private Atom getAuxiliaryAtom(Atom headAtom) { - DLPredicate p = headAtom.getDLPredicate(); + DLPredicate p = headAtom.getDLPredicate(); if (p instanceof AtLeast || p instanceof AtLeast) { - return Atom.create(generateAuxiliaryRule((AtLeast) p, true), headAtom.getArgument(0)); + return Atom.create(generateAuxiliaryRule((AtLeast) p, true), headAtom.getArgument(0)); } - if (p instanceof AtomicConcept) - return Atom.create(generateAuxiliaryRule((AtomicConcept) p), headAtom.getArgument(0)); - if (p instanceof AtomicRole) + if(p instanceof AtomicConcept) + return Atom.create(generateAuxiliaryRule((AtomicConcept) p), headAtom.getArgument(0)); + if(p instanceof AtomicRole) return 
Atom.create(generateAuxiliaryRule((AtomicRole) p), headAtom.getArgument(0), headAtom.getArgument(1)); - if (p instanceof Equality || p instanceof AnnotatedEquality) - return Atom.create(generateAuxiliaryRule(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1)); - if (p instanceof Inequality) - return Atom.create(generateAuxiliaryRule((Inequality) p), headAtom.getArgument(0), headAtom.getArgument(1)); + if(p instanceof Equality || p instanceof AnnotatedEquality) + return Atom.create(generateAuxiliaryRule(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1)); + if(p instanceof Inequality) + return Atom.create(generateAuxiliaryRule((Inequality) p), headAtom.getArgument(0), headAtom.getArgument(1)); return null; } private Atom getTrackingAtom(Atom headAtom) { - DLPredicate p = headAtom.getDLPredicate(); + DLPredicate p = headAtom.getDLPredicate(); if (p instanceof AtLeast) { - p = Normalisation.toAtLeastConcept((AtLeast) p); - return Atom.create(getTrackingDLPredicate(AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p))), headAtom.getArgument(0)); + p = Normalisation.toAtLeastConcept((AtLeast) p); + return Atom.create(getTrackingDLPredicate(AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p))), headAtom + .getArgument(0)); } - if (p instanceof AtomicConcept) + if(p instanceof AtomicConcept) return Atom.create(getTrackingDLPredicate(p), headAtom.getArgument(0)); - if (p instanceof AtomicRole) + if(p instanceof AtomicRole) return Atom.create(getTrackingDLPredicate(p), headAtom.getArgument(0), headAtom.getArgument(1)); - if (p instanceof Equality || p instanceof AnnotatedEquality) - return Atom.create(getTrackingDLPredicate(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1)); - if (p instanceof Inequality) + if(p instanceof Equality || p instanceof AnnotatedEquality) + return Atom.create(getTrackingDLPredicate(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1)); + if(p instanceof Inequality) return Atom.create(getTrackingDLPredicate(p), headAtom.getArgument(0), headAtom.getArgument(1)); return null; } private Atom getGapAtom(Atom headAtom) { - DLPredicate p = headAtom.getDLPredicate(); + DLPredicate p = headAtom.getDLPredicate(); if (p instanceof AtLeast) { - p = Normalisation.toAtLeastConcept((AtLeast) p); - return Atom.create(getGapDLPredicate(AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p))), headAtom.getArgument(0)); + p = Normalisation.toAtLeastConcept((AtLeast) p); + return Atom.create(getGapDLPredicate(AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p))), headAtom + .getArgument(0)); } - if (p instanceof AtomicConcept) + if(p instanceof AtomicConcept) return Atom.create(getGapDLPredicate(p), headAtom.getArgument(0)); - if (p instanceof AtomicRole) + if(p instanceof AtomicRole) return Atom.create(getGapDLPredicate(p), headAtom.getArgument(0), headAtom.getArgument(1)); - if (p instanceof Equality || p instanceof AnnotatedEquality) - return Atom.create(getGapDLPredicate(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1)); - if (p instanceof Inequality) + if(p instanceof Equality || p instanceof AnnotatedEquality) + return Atom.create(getGapDLPredicate(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1)); + if(p instanceof Inequality) return Atom.create(getGapDLPredicate(p), headAtom.getArgument(0), headAtom.getArgument(1)); if (p instanceof 
DatatypeRestriction) return Atom.create(getGapDLPredicate(p), headAtom.getArgument(0)); @@ -115,71 +118,71 @@ public class TrackingRuleEncoderDisjVar1 extends TrackingRuleEncoderWithGap { private void encodingDisjunctiveRule(DLClause clause) { int headLength = clause.getHeadLength(); - + Atom[] auxAtoms = new Atom[headLength]; for (int i = 0; i < headLength; ++i) auxAtoms[i] = getAuxiliaryAtom(clause.getHeadAtom(i)); - + Atom[] trackingAtoms = new Atom[headLength]; for (int i = 0; i < headLength; ++i) trackingAtoms[i] = getTrackingAtom(clause.getHeadAtom(i)); - + Atom[] gapAtoms = new Atom[headLength]; for (int i = 0; i < headLength; ++i) gapAtoms[i] = getGapAtom(clause.getHeadAtom(i)); - + Atom[] bodyAtoms = clause.getBodyAtoms(); - + LinkedList newHeadAtoms = new LinkedList(); - DLPredicate selected = AtomicConcept.create(getSelectedPredicate()); + DLPredicate selected = AtomicConcept.create(getSelectedPredicate()); newHeadAtoms.add(Atom.create(selected, getIndividual4GeneralRule(clause))); - + for (Atom atom: bodyAtoms) { Atom newAtom = Atom.create( - getTrackingDLPredicate(atom.getDLPredicate()), + getTrackingDLPredicate(atom.getDLPredicate()), DLClauseHelper.getArguments(atom)); newHeadAtoms.add(newAtom); } DLClause newClause; - int index; + int index; for (int j = 0; j < headLength; ++j) { Atom[] newBodyAtoms = new Atom[headLength * 2 + bodyAtoms.length]; - index = 0; + index = 0; for (int i = 0; i < headLength; ++i, ++index) - newBodyAtoms[index] = gapAtoms[i]; + newBodyAtoms[index] = gapAtoms[i]; for (int i = 0; i < headLength; ++i, ++index) if (i != j) newBodyAtoms[index] = auxAtoms[i]; - else - newBodyAtoms[index] = trackingAtoms[i]; - + else + newBodyAtoms[index] = trackingAtoms[i]; + for (int i = 0; i < bodyAtoms.length; ++i, ++index) - newBodyAtoms[index] = bodyAtoms[i]; - + newBodyAtoms[index] = bodyAtoms[i]; + for (Atom atom: newHeadAtoms) { - newClause = DLClause.create(new Atom[] {atom}, newBodyAtoms); + newClause = DLClause.create(new Atom[]{atom}, newBodyAtoms); addTrackingClause(newClause); } } } private void addTrackingClause(DLClause clause) { - trackingClauses.add(clause); + trackingClauses.add(clause); } private void addDisjunctiveRule(DLClause clause) { disjunctiveRules.add(clause); } - + private DLPredicate getAuxPredicate(DLPredicate p) { if (p instanceof AtLeastConcept) { StringBuilder builder = new StringBuilder( Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p)); - builder.append("_AUXa").append(currentQuery.getQueryID()); - return AtomicConcept.create(builder.toString()); + builder.append("_AUXa").append(currentQuery.getQueryID()); + return AtomicConcept.create(builder.toString()); } - + return getDLPredicate(p, "_AUXa" + currentQuery.getQueryID()); } @@ -188,120 +191,118 @@ public class TrackingRuleEncoderDisjVar1 extends TrackingRuleEncoderWithGap { } private DLPredicate generateAuxiliaryRule(AtLeast p1, boolean withAux) { - AtLeastConcept p = Normalisation.toAtLeastConcept(p1); - - int num = p.getNumber(); - Variable[] Ys = new Variable[num]; + AtLeastConcept p = Normalisation.toAtLeastConcept(p1); + + int num = p.getNumber(); + Variable[] Ys = new Variable[num]; if (num > 1) - for (int i = 0; i < num; ++i) + for(int i = 0; i < num; ++i) Ys[i] = Variable.create("Y" + (i + 1)); - else - Ys[0] = Y; - - Collection expandedAtom = new LinkedList(); - Collection representativeAtom = new LinkedList(); + else + Ys[0] = Y; + + Collection expandedAtom = new LinkedList(); + Collection representativeAtom = new LinkedList(); if (p.getOnRole() instanceof 
AtomicRole) { - AtomicRole r = (AtomicRole) p.getOnRole(); - for (int i = 0; i < num; ++i) + AtomicRole r = (AtomicRole) p.getOnRole(); + for(int i = 0; i < num; ++i) expandedAtom.add(Atom.create(r, X, Ys[i])); - representativeAtom.add(Atom.create(r, X, Ys[0])); + representativeAtom.add(Atom.create(r, X, Ys[0])); } else { - AtomicRole r = ((InverseRole) p.getOnRole()).getInverseOf(); - for (int i = 0; i < num; ++i) + AtomicRole r = ((InverseRole) p.getOnRole()).getInverseOf(); + for(int i = 0; i < num; ++i) expandedAtom.add(Atom.create(r, Ys[i], X)); - representativeAtom.add(Atom.create(r, Ys[0], X)); + representativeAtom.add(Atom.create(r, Ys[0], X)); } - + if (num > 1) { - representativeAtom.add(Atom.create(Inequality.INSTANCE, Ys[0], Ys[1])); + representativeAtom.add(Atom.create(Inequality.INSTANCE, Ys[0], Ys[1])); } for (int i = 0; i < num; ++i) for (int j = i + 1; j < num; ++j) - expandedAtom.add(Atom.create(Inequality.INSTANCE, Ys[i], Ys[j])); - + expandedAtom.add(Atom.create(Inequality.INSTANCE, Ys[i], Ys[j])); + if (!p.getToConcept().equals(AtomicConcept.THING)) { - AtomicConcept c; - if (p.getToConcept() instanceof AtomicConcept) + AtomicConcept c; + if(p.getToConcept() instanceof AtomicConcept) c = (AtomicConcept) p.getToConcept(); else { c = OverApproxExist.getNegationConcept(((AtomicNegationConcept) p.getToConcept()).getNegatedAtomicConcept()); } for (int i = 0; i < num; ++i) - expandedAtom.add(Atom.create(c, Ys[i])); + expandedAtom.add(Atom.create(c, Ys[i])); representativeAtom.add(Atom.create(c, Ys[0])); } AtomicConcept ac = AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct(p)); - DLPredicate trackingPredicate = getTrackingDLPredicate(ac); - DLPredicate gapPredicate = getGapDLPredicate(ac); + DLPredicate trackingPredicate = getTrackingDLPredicate(ac); + DLPredicate gapPredicate = getGapDLPredicate(ac); DLPredicate auxPredicate = withAux ? getAuxPredicate(p) : null; - + for (Atom atom: representativeAtom) { - Atom[] bodyAtoms = new Atom[expandedAtom.size() + 1]; + Atom[] bodyAtoms = new Atom[expandedAtom.size() + 1]; if (atom.getArity() == 1) bodyAtoms[0] = Atom.create(getTrackingDLPredicate(atom.getDLPredicate()), atom.getArgument(0)); - else + else bodyAtoms[0] = Atom.create(getTrackingDLPredicate(atom.getDLPredicate()), atom.getArgument(0), atom.getArgument(1)); - int i = 0; + int i = 0; for (Atom bodyAtom: expandedAtom) - bodyAtoms[++i] = bodyAtom; + bodyAtoms[++i] = bodyAtom; addTrackingClause(DLClause.create(new Atom[] {Atom.create(trackingPredicate, X)}, bodyAtoms)); - - bodyAtoms = new Atom[expandedAtom.size() + 1]; + + bodyAtoms = new Atom[expandedAtom.size() + 1]; if (atom.getArity() == 1) bodyAtoms[0] = Atom.create(getGapDLPredicate(atom.getDLPredicate()), atom.getArgument(0)); - else + else bodyAtoms[0] = Atom.create(getGapDLPredicate(atom.getDLPredicate()), atom.getArgument(0), atom.getArgument(1)); - i = 0; + i = 0; for (Atom bodyAtom: expandedAtom) - bodyAtoms[++i] = bodyAtom; + bodyAtoms[++i] = bodyAtom; addTrackingClause(DLClause.create(new Atom[] {Atom.create(gapPredicate, X)}, bodyAtoms)); - + if (withAux) { - bodyAtoms = new Atom[expandedAtom.size() + 1]; + bodyAtoms = new Atom[expandedAtom.size() + 1]; bodyAtoms[0] = getAuxiliaryAtom(atom); - i = 0; + i = 0; for (Atom bodyAtom: expandedAtom) - bodyAtoms[++i] = bodyAtom; + bodyAtoms[++i] = bodyAtom; addTrackingClause(DLClause.create(new Atom[] {Atom.create(auxPredicate, X)}, bodyAtoms)); } } - + return withAux ? 
auxPredicate : trackingPredicate; } private DLPredicate generateAuxiliaryRule(AtomicRole p) { - if (currentQuery.isBottom()) + if(currentQuery.isBottom()) return getTrackingDLPredicate(p); - - DLPredicate ret = getAuxPredicate(p); + + DLPredicate ret = getAuxPredicate(p); Atom[] headAtom = new Atom[] {Atom.create(ret, X, Y)}; addTrackingClause( - DLClause.create(headAtom, new Atom[] {Atom.create(getTrackingDLPredicate(p), X, Y)})); + DLClause.create(headAtom, new Atom[]{Atom.create(getTrackingDLPredicate(p), X, Y)})); addTrackingClause( - DLClause.create(headAtom, new Atom[] {Atom.create(getTrackingBottomDLPredicate(p), X, Y)})); - - return ret; + DLClause.create(headAtom, new Atom[]{Atom.create(getTrackingBottomDLPredicate(p), X, Y)})); + + return ret; } - - private Variable X = Variable.create("X"), Y = Variable.create("Y"); private DLPredicate generateAuxiliaryRule(AtomicConcept p) { if (currentQuery.isBottom()) - return getTrackingDLPredicate(p); - - DLPredicate ret = getAuxPredicate(p); - Atom[] headAtom = new Atom[] {Atom.create(ret, X)}; + return getTrackingDLPredicate(p); + + DLPredicate ret = getAuxPredicate(p); + Atom[] headAtom = new Atom[]{Atom.create(ret, X)}; addTrackingClause( - DLClause.create(headAtom, - new Atom[] { Atom.create(getTrackingDLPredicate(p), X)})); + DLClause.create(headAtom, + new Atom[]{Atom.create(getTrackingDLPredicate(p), X)})); addTrackingClause( - DLClause.create(headAtom, + DLClause.create(headAtom, new Atom[] { Atom.create(getTrackingBottomDLPredicate(p), X)})); - - return ret; + + return ret; } private DLPredicate generateAuxiliaryRule(Equality instance) { @@ -309,20 +310,18 @@ public class TrackingRuleEncoderDisjVar1 extends TrackingRuleEncoderWithGap { } private DLPredicate generateAuxiliaryRule(Inequality instance) { - return generateAuxiliaryRule(AtomicRole.create(Namespace.INEQUALITY)); + return generateAuxiliaryRule(AtomicRole.create(Namespace.INEQUALITY)); } - + @Override public String getTrackingProgram() { StringBuilder sb = getTrackingProgramBody(); if (currentQuery.isBottom()) - sb.append(getBottomTrackingProgram()); - sb.insert(0, MyPrefixes.PAGOdAPrefixes.prefixesText()); - return sb.toString(); + sb.append(getBottomTrackingProgram()); + sb.insert(0, MyPrefixes.PAGOdAPrefixes.prefixesText()); + return sb.toString(); } - private String bottomTrackingProgram = null; - private String getBottomTrackingProgram() { if (bottomTrackingProgram != null) return bottomTrackingProgram.replace("_tn", getTrackingPredicate("")); @@ -350,7 +349,7 @@ public class TrackingRuleEncoderDisjVar1 extends TrackingRuleEncoderWithGap { // for (Atom tAtom: clause.getHeadAtoms()) { // for (int i = 0; i < tAtom.getArity(); ++i) // if ((t = tAtom.getArgument(i)) instanceof Individual) -// if (((Individual) t).getIRI().startsWith(OverApproxExist.skolemisedIndividualPrefix)) +// if (((Individual) t).getIRI().startsWith(OverApproxExist.SKOLEMISED_INDIVIDUAL_PREFIX)) // clause = program.getCorrespondingClause(clause); // } -- cgit v1.2.3
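
Usage note (illustrative, not part of the commit): the SkolemTermsManager hunks above rename getDepth to getDepthOf and skolemisedIndividualPrefix to SKOLEMISED_INDIVIDUAL_PREFIX. The sketch below shows how that API could be exercised; all class and method names are taken from the diff, while the clause, IRIs and offset are invented for the example.

import org.semanticweb.HermiT.model.AtLeastConcept;
import org.semanticweb.HermiT.model.Atom;
import org.semanticweb.HermiT.model.AtomicConcept;
import org.semanticweb.HermiT.model.AtomicRole;
import org.semanticweb.HermiT.model.DLClause;
import org.semanticweb.HermiT.model.Variable;
import uk.ac.ox.cs.JRDFox.model.Individual;
import uk.ac.ox.cs.pagoda.rules.approximators.SkolemTermsManager;

// Illustrative only: exercises the SkolemTermsManager API as renamed in this commit.
class SkolemTermsManagerUsageSketch {
    public static void main(String[] args) {
        Variable x = Variable.create("X");
        AtomicConcept a = AtomicConcept.create("http://example.org/A");
        AtomicConcept b = AtomicConcept.create("http://example.org/B");
        AtomicRole r = AtomicRole.create("http://example.org/R");

        // A toy existential clause A(X) -> >=1 R.B(X), in HermiT's normalised form.
        DLClause clause = DLClause.create(
                new Atom[] {Atom.create(AtLeastConcept.create(1, r, b), x)},
                new Atom[] {Atom.create(a, x)});

        SkolemTermsManager manager = SkolemTermsManager.getInstance();

        // Fresh Skolem individual, unique for the (clause, offset) pair; its depth is 0
        // because the two-argument variant records no dependency on other Skolem terms.
        Individual t = manager.getFreshIndividual(clause, 0);
        System.out.println(t + " depth=" + manager.getDepthOf(t));
        System.out.println("Skolem terms so far: " + manager.getNumberOfSkolemisedIndividual());
    }
}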