From 17bd9beaf7f358a44e5bf36a5855fe6727d506dc Mon Sep 17 00:00:00 2001
From: Federico Igne
Date: Tue, 10 May 2022 18:17:06 +0100
Subject: [pagoda] Move project to Scala

This commit includes a few changes:

- The repository still uses Maven to manage dependencies, but it is now a Scala project.
- The code has been ported from OWLAPI 3.4.10 to 5.1.20.
- A proof-of-concept program using both RSAComb and PAGOdA has been added.
---
 .../uk/ac/ox/cs/pagoda/rules/ApproxProgram.java    | 106 ++++
 .../uk/ac/ox/cs/pagoda/rules/DatalogProgram.java   |  81 +++++
 .../ac/ox/cs/pagoda/rules/DisjunctiveProgram.java  |  20 ++
 .../ac/ox/cs/pagoda/rules/EqualityAxiomatiser.java |  91 +++++
 .../cs/pagoda/rules/ExistConstantApproximator.java |  26 ++
 .../ac/ox/cs/pagoda/rules/ExistentialProgram.java  |  20 ++
 .../cs/pagoda/rules/ExistentialToDisjunctive.java  |  75 ++++
 .../uk/ac/ox/cs/pagoda/rules/GeneralProgram.java   |  50 +++
 .../ac/ox/cs/pagoda/rules/IncrementalProgram.java  |  14 +
 .../ac/ox/cs/pagoda/rules/LowerDatalogProgram.java | 238 +++++++++++++
 .../java/uk/ac/ox/cs/pagoda/rules/Program.java     | 384 +++++++++++++++++++++
 .../ac/ox/cs/pagoda/rules/UpperDatalogProgram.java |  47 +++
 .../uk/ac/ox/cs/pagoda/rules/UpperProgram.java     |  12 +
 .../pagoda/rules/approximators/Approximator.java   |  42 +++
 .../LimitedSkolemisationApproximator.java          | 146 ++++++++
 .../pagoda/rules/approximators/OverApproxBoth.java |  24 ++
 .../pagoda/rules/approximators/OverApproxDisj.java | 100 ++++++
 .../rules/approximators/OverApproxExist.java       | 224 ++++++++++++
 .../rules/approximators/SkolemTermsManager.java    | 139 ++++++++
 .../approximators/TupleDependentApproximator.java  |  19 +
 .../uk/ac/ox/cs/pagoda/rules/clauses/Clause.java   | 100 ++++++
 21 files changed, 1958 insertions(+)
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/ApproxProgram.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/DatalogProgram.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/DisjunctiveProgram.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/EqualityAxiomatiser.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/ExistConstantApproximator.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/ExistentialProgram.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/ExistentialToDisjunctive.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/GeneralProgram.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/IncrementalProgram.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/LowerDatalogProgram.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/Program.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/UpperDatalogProgram.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/UpperProgram.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/Approximator.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxBoth.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxDisj.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxExist.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/TupleDependentApproximator.java
 create mode 100644 src/main/java/uk/ac/ox/cs/pagoda/rules/clauses/Clause.java
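A note on the OWLAPI 3.4.10 -> 5.1.20 port mentioned above: the call-site changes are not isolated in this patch, so the snippet below is only a minimal, hypothetical sketch of the kind of change such a port typically involves (assuming the usual OWLAPI 4+ Imports enum in place of the older boolean "include imports closure" flags); it is not code taken from this commit.

    import org.semanticweb.owlapi.model.OWLClass;
    import org.semanticweb.owlapi.model.OWLOntology;
    import org.semanticweb.owlapi.model.parameters.Imports;

    import java.util.Set;

    class OwlApiPortSketch {
        // OWLAPI 3.x style:  ontology.getClassesInSignature(true)
        // OWLAPI 5.x style:  the boolean flag is expressed via the Imports enum
        static Set<OWLClass> classesIncludingImports(OWLOntology ontology) {
            return ontology.getClassesInSignature(Imports.INCLUDED);
        }
    }
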
(limited to 'src/main/java/uk/ac/ox/cs/pagoda/rules') diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/ApproxProgram.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/ApproxProgram.java new file mode 100644 index 0000000..acbf354 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/ApproxProgram.java @@ -0,0 +1,106 @@ +package uk.ac.ox.cs.pagoda.rules; + +import org.semanticweb.HermiT.model.DLClause; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; +import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; +import uk.ac.ox.cs.pagoda.owl.OWLHelper; +import uk.ac.ox.cs.pagoda.rules.approximators.Approximator; + +import java.util.*; + +public abstract class ApproxProgram extends Program { + + protected Approximator m_approx = null; + /** + * mapping from over-approximated DLClauses to DLClauses from the original ontology + */ + Map correspondence = new HashMap(); + + protected ApproxProgram() { initApproximator(); } + + protected abstract void initApproximator(); + + @Override + public void transform() { + super.transform(); + Iterator iterClause = transitiveClauses.iterator(); + for (Iterator iterAxiom = transitiveAxioms.iterator(); iterAxiom.hasNext(); ) + addCorrespondence(iterClause.next(), iterAxiom.next()); + + iterClause = subPropChainClauses.iterator(); + for (Iterator iterAxiom = subPropChainAxioms.iterator(); iterAxiom.hasNext(); ) + addCorrespondence(iterClause.next(), iterAxiom.next()); + } + + @Override + public Collection convert2Clauses(DLClause clause) { + Collection ret = botStrategy.process(m_approx.convert(clause, clause)); +// OWLAxiom correspondingAxiom = OWLHelper.getOWLAxiom(ontology, clause); + for (DLClause newClause: ret) { + addCorrespondence(newClause, clause); +// addCorrespondence(newClause, correspondingAxiom); + } + return ret; + } + + private void addCorrespondence(DLClause newClause, Object corresponding) { + Object object; + if ((object = correspondence.get(newClause)) != null) { + if (object.equals(corresponding)) + return ; + + if (object instanceof DLClause) { + DLClause c1 = (DLClause) object; + if (c1.getHeadLength() == 1) return ; + DLClause c2 = (DLClause) corresponding; + if (c2.getHeadLength() == 1) { + correspondence.put(newClause, c2); + return ; + } + ClauseSet list = new ClauseSet(c1, c2); + correspondence.put(newClause, list); + } + else if (object instanceof ClauseSet){ + ClauseSet list = (ClauseSet) object; + list.add((DLClause) corresponding); + } + } + correspondence.put(newClause, corresponding); + } + + public OWLAxiom getEquivalentAxiom(DLClause clause) { + Object obj = correspondence.get(clause); + while (obj != null && obj instanceof DLClause && !obj.equals(clause) && correspondence.containsKey(obj)) + obj = correspondence.get(clause); + if (obj instanceof OWLAxiom) + return (OWLAxiom) obj; + else if (obj != null) + return OWLHelper.getOWLAxiom(ontology, (DLClause) obj); + else { + return OWLHelper.getOWLAxiom(ontology, clause); + } + } + + public DLClause getCorrespondingClause(DLClause clause) { + Object obj = correspondence.get(clause); + if (obj instanceof DLClause) + return (DLClause) obj; + else + return clause; + } +} + +class ClauseSet extends HashSet { + + /** + * + */ + private static final long serialVersionUID = 1L; + + public ClauseSet(DLClause first, DLClause second) { + add(first); + add(second); + } + +} \ No newline at end of file diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/DatalogProgram.java 
b/src/main/java/uk/ac/ox/cs/pagoda/rules/DatalogProgram.java new file mode 100644 index 0000000..e2a171d --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/DatalogProgram.java @@ -0,0 +1,81 @@ +package uk.ac.ox.cs.pagoda.rules; + +import org.semanticweb.owlapi.model.OWLOntology; +import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; +import uk.ac.ox.cs.pagoda.constraints.UpperUnaryBottom; +import uk.ac.ox.cs.pagoda.util.disposable.Disposable; +import uk.ac.ox.cs.pagoda.util.disposable.DisposedException; + +import java.io.InputStream; + +public class DatalogProgram extends Disposable { + + UpperDatalogProgram upperProgram = new UpperDatalogProgram(); + LowerDatalogProgram lowerProgram; + GeneralProgram program = new GeneralProgram(); + + BottomStrategy upperBottom; + + public DatalogProgram(InputStream inputStream) { + lowerProgram = new LowerDatalogProgram(); + + upperProgram.load(inputStream, upperBottom = new UpperUnaryBottom()); + lowerProgram.clone(upperProgram); + program.clone(upperProgram); + + upperProgram.transform(); + lowerProgram.transform(); + program.transform(); + + program.buildDependencyGraph(); + lowerProgram.dependencyGraph = upperProgram.buildDependencyGraph(); + } + + public DatalogProgram(OWLOntology o) { + lowerProgram = new LowerDatalogProgram(); + + upperProgram.load(o, upperBottom = new UpperUnaryBottom()); +// upperProgram.load(o, upperBottom = new UnaryBottom()); + lowerProgram.clone(upperProgram); + program.clone(upperProgram); +// program.botStrategy = new UnaryBottom(); + + upperProgram.transform(); + lowerProgram.transform(); + program.transform(); + + program.buildDependencyGraph(); + lowerProgram.dependencyGraph = upperProgram.buildDependencyGraph(); + } + + public LowerDatalogProgram getLower() { + if(isDisposed()) throw new DisposedException(); + return lowerProgram; + } + + public UpperDatalogProgram getUpper() { + if(isDisposed()) throw new DisposedException(); + return upperProgram; + } + + public GeneralProgram getGeneral() { + if(isDisposed()) throw new DisposedException(); + return program; + } + + public String getAdditionalDataFile() { + if(isDisposed()) throw new DisposedException(); + return upperProgram.getAdditionalDataFile(); + } + + public BottomStrategy getUpperBottomStrategy() { + if(isDisposed()) throw new DisposedException(); + return upperBottom; + } + + @Override + public void dispose() { + super.dispose(); + if(upperProgram != null) upperProgram.deleteABoxTurtleFile(); + } +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/DisjunctiveProgram.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/DisjunctiveProgram.java new file mode 100644 index 0000000..d50c2d4 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/DisjunctiveProgram.java @@ -0,0 +1,20 @@ +package uk.ac.ox.cs.pagoda.rules; + +import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist; + +public class DisjunctiveProgram extends UpperProgram { + + @Override + protected void initApproximator() { + m_approx = new OverApproxExist(); + } + +// @Override +// public String getDirectory() { +// File dir = new File(ontologyDirectory + Utility.FILE_SEPARATOR + "disjunctiveRules"); +// if (!dir.exists()) +// dir.mkdirs(); +// return dir.getPath(); +// } + +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/EqualityAxiomatiser.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/EqualityAxiomatiser.java new file mode 100644 index 0000000..81b8a01 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/EqualityAxiomatiser.java @@ -0,0 +1,91 @@ +package 
uk.ac.ox.cs.pagoda.rules; + +import java.io.BufferedWriter; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLObjectProperty; +import org.semanticweb.owlapi.model.OWLOntology; +import uk.ac.ox.cs.pagoda.owl.OWLHelper; +import uk.ac.ox.cs.pagoda.util.Namespace; +import uk.ac.ox.cs.pagoda.util.Utility; + +public class EqualityAxiomatiser { + + OWLOntology ontology; + + public EqualityAxiomatiser(String fileName) { + ontology = OWLHelper.loadOntology(OWLManager.createOWLOntologyManager(), fileName); + } + + public EqualityAxiomatiser(OWLOntology ontology) { + this.ontology = ontology; + } + + public static void main(String[] args) throws IOException { + if (args.length == 0) { + args = new String[1]; + args[0] = "../uobmGenerator/ontologies/2rl/univ-bench-dl-TBox.owl"; + } + + EqualityAxiomatiser axiomatiser; + for (String fileName: args) { + axiomatiser = new EqualityAxiomatiser(fileName); + String outputFileName = fileName.replace(".owl", "-axiomatised.rule"); + BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputFileName))); + writer.write(axiomatiser.getRuleTexts()); + writer.close(); + } + } + + public String getRuleTexts() { + StringBuffer buf = new StringBuffer(); +// buf.append(reflexivity()).append(Utility.LINE_SEPARATOR); + buf.append(symmetry()).append(Utility.LINE_SEPARATOR); + buf.append(transitivity()).append(Utility.LINE_SEPARATOR); + + for (OWLObjectProperty p: ontology.getObjectPropertiesInSignature(true)) + buf.append(addingEqualities4Properties(OWLHelper.addAngles(p.getIRI().toString()))).append(Utility.LINE_SEPARATOR); + + for (OWLClass c: ontology.getClassesInSignature(true)) + buf.append(addingEqualities4Class(OWLHelper.addAngles(c.getIRI().toString()))).append(Utility.LINE_SEPARATOR); + + return buf.toString(); + } + + private static String transitivity() { + StringBuffer buffer = new StringBuffer(); + buffer.append(Namespace.EQUALITY_QUOTED).append("(?Y0,?Y2) :- ").append(Namespace.EQUALITY_QUOTED ).append("(?Y0,?Y1), ").append(Namespace.EQUALITY_QUOTED ).append("(?Y1,?Y2) ."); + return buffer.toString(); + } + + private static String symmetry() { + StringBuffer buffer = new StringBuffer(); + buffer.append(Namespace.EQUALITY_QUOTED ).append("(?Y1,?Y0) :- ").append(Namespace.EQUALITY_QUOTED ).append("(?Y0,?Y1) ."); + return buffer.toString(); + } + + @SuppressWarnings("unused") + private static String reflexivity() { + StringBuffer buffer = new StringBuffer(); + buffer.append(Namespace.EQUALITY_QUOTED ).append("(?Y0,?Y0) :- ."); + return buffer.toString(); + } + + private static String addingEqualities4Properties(String property) { + StringBuffer buffer = new StringBuffer(); + buffer.append(property).append("(?Y2,?Y1) :- ").append(property).append("(?Y0,?Y1), ").append(Namespace.EQUALITY_QUOTED ).append("(?Y0,?Y2) .\n"); + buffer.append(property).append("(?Y0,?Y2) :- ").append(property).append("(?Y0,?Y1), ").append(Namespace.EQUALITY_QUOTED ).append("(?Y1,?Y2) ."); + return buffer.toString(); + } + + private static String addingEqualities4Class(String clazz) { + StringBuffer buffer = new StringBuffer(); + buffer.append(clazz).append("(?Y1) :- ").append(clazz).append("(?Y0), ").append(Namespace.EQUALITY_QUOTED ).append("(?Y0,?Y1) ."); + return buffer.toString(); + } + +} diff --git 
a/src/main/java/uk/ac/ox/cs/pagoda/rules/ExistConstantApproximator.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/ExistConstantApproximator.java new file mode 100644 index 0000000..a7afa2e --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/ExistConstantApproximator.java @@ -0,0 +1,26 @@ +package uk.ac.ox.cs.pagoda.rules; + +import org.semanticweb.HermiT.model.DLClause; +import org.semanticweb.HermiT.model.Individual; +import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist; +import uk.ac.ox.cs.pagoda.rules.approximators.TupleDependentApproximator; +import uk.ac.ox.cs.pagoda.util.tuples.Tuple; + +import java.util.Collection; + +/** + * A wrapper for OverApproxExist. + * */ +public class ExistConstantApproximator implements TupleDependentApproximator { + + private final OverApproxExist overApproxExist; + + public ExistConstantApproximator() { + overApproxExist = new OverApproxExist(); + } + + @Override + public Collection convert(DLClause clause, DLClause originalClause, Collection> violationTuples) { + return overApproxExist.convert(clause, originalClause); + } +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/ExistentialProgram.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/ExistentialProgram.java new file mode 100644 index 0000000..e825917 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/ExistentialProgram.java @@ -0,0 +1,20 @@ +package uk.ac.ox.cs.pagoda.rules; + +import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxDisj; + +public class ExistentialProgram extends UpperProgram { + +// @Override +// public String getDirectory() { +// File dir = new File(ontologyDirectory + Utility.FILE_SEPARATOR + "existential"); +// if (!dir.exists()) +// dir.mkdirs(); +// return dir.getPath(); +// } + + @Override + protected void initApproximator() { + m_approx = new OverApproxDisj(); + } + +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/ExistentialToDisjunctive.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/ExistentialToDisjunctive.java new file mode 100644 index 0000000..2098f73 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/ExistentialToDisjunctive.java @@ -0,0 +1,75 @@ +package uk.ac.ox.cs.pagoda.rules; + +import org.semanticweb.HermiT.model.*; +import org.semanticweb.owlapi.model.OWLObjectProperty; +import org.semanticweb.owlapi.model.OWLOntology; +import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; +import uk.ac.ox.cs.pagoda.rules.approximators.Approximator; +import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist; + +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Set; + +public class ExistentialToDisjunctive extends UpperProgram { + + Set inverseFuncProperties = new HashSet(); + + @Override + public void load(OWLOntology o, BottomStrategy bottomStrategy) { + super.load(o, bottomStrategy); + for (OWLObjectProperty prop: ontology.getObjectPropertiesInSignature(true)) + if (!(ontology.getInverseFunctionalObjectPropertyAxioms(prop).isEmpty())) + inverseFuncProperties.add(prop.getIRI().toString()); + ((RefinedOverApproxExist) m_approx).setInverseFuncProps(inverseFuncProperties); + } + + @Override + protected void initApproximator() { + m_approx = new RefinedOverApproxExist(); + } + +} + +class RefinedOverApproxExist implements Approximator { + + Approximator approxExist = new OverApproxExist(); + Set inverseFuncProperties; + + public void setInverseFuncProps(Set set) { + inverseFuncProperties = set; + } + + @Override + public Collection convert(DLClause clause, DLClause originalClause) { + 
DLPredicate p; + Collection newHeadAtoms = new LinkedList(); + for (Atom headAtom: clause.getHeadAtoms()) + newHeadAtoms.add(headAtom); + + for (Atom headAtom: clause.getHeadAtoms()) { + p = headAtom.getDLPredicate(); + if (isAtLeastOneOnInverseFuncProperties(p)) + newHeadAtoms.add(headAtom); + } + + if (newHeadAtoms.size() > clause.getHeadLength()) + clause = DLClause.create(newHeadAtoms.toArray(new Atom[0]), clause.getBodyAtoms()); + + return approxExist.convert(clause, clause); + } + + private boolean isAtLeastOneOnInverseFuncProperties(DLPredicate predicate) { + if (!(predicate instanceof AtLeastConcept)) + return false; + AtLeastConcept atLeast = (AtLeastConcept) predicate; + if (!(atLeast.getOnRole() instanceof AtomicRole)) + return false; + + return atLeast.getNumber() == 1 && inverseFuncProperties.contains(((AtomicRole) atLeast.getOnRole()).getIRI()); + } + + +} + diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/GeneralProgram.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/GeneralProgram.java new file mode 100644 index 0000000..e390a29 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/GeneralProgram.java @@ -0,0 +1,50 @@ +package uk.ac.ox.cs.pagoda.rules; + +import org.apache.commons.io.FilenameUtils; +import org.semanticweb.HermiT.model.DLClause; +import org.semanticweb.owlapi.model.OWLOntology; +import uk.ac.ox.cs.pagoda.constraints.UnaryBottom; + +import java.util.Collection; +import java.util.Collections; +import java.util.Set; + +public class GeneralProgram extends Program { + + public GeneralProgram(Set relevantClauses, OWLOntology relevantOntology) { + ontology = relevantOntology; + + ontologyDirectory = null; +// dlOntology = null; + botStrategy = new UnaryBottom(); + + clauses = botStrategy.process(relevantClauses); + } + + public GeneralProgram() {} + + public Collection convert2Clauses(DLClause clause) { + return botStrategy.process(Collections.singleton(clause)); + } + + @Override + public String getOutputPath() { + return FilenameUtils.concat(getDirectory(), "rules.dlog"); + } + +// @Override +// public String getDirectory() { +// File dir = new File(ontologyDirectory + Utility.FILE_SEPARATOR + "general"); +// if (!dir.exists()) +// dir.mkdirs(); +// return dir.getPath(); +// } + + public boolean isHorn() { + for (DLClause clause: clauses) + if (clause.getHeadLength() > 1) + return false; + return true; + } + +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/IncrementalProgram.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/IncrementalProgram.java new file mode 100644 index 0000000..339738a --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/IncrementalProgram.java @@ -0,0 +1,14 @@ +package uk.ac.ox.cs.pagoda.rules; + +import java.util.Collection; + +import org.semanticweb.HermiT.model.DLClause; +import org.semanticweb.owlapi.model.OWLOntology; + +public interface IncrementalProgram { + + public OWLOntology getOntology(); + + public void enrich(Collection delta); + +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/LowerDatalogProgram.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/LowerDatalogProgram.java new file mode 100644 index 0000000..a2676e8 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/LowerDatalogProgram.java @@ -0,0 +1,238 @@ +package uk.ac.ox.cs.pagoda.rules; + +import org.apache.commons.io.FilenameUtils; +import org.semanticweb.HermiT.Reasoner; +import org.semanticweb.HermiT.model.*; +import org.semanticweb.owlapi.model.*; +import org.semanticweb.owlapi.reasoner.Node; +import 
uk.ac.ox.cs.pagoda.constraints.BottomStrategy; +import uk.ac.ox.cs.pagoda.constraints.NullaryBottom; +import uk.ac.ox.cs.pagoda.constraints.UnaryBottom; +import uk.ac.ox.cs.pagoda.constraints.UpperUnaryBottom; +import uk.ac.ox.cs.pagoda.multistage.Normalisation; +import uk.ac.ox.cs.pagoda.multistage.RestrictedApplication; +import uk.ac.ox.cs.pagoda.rules.approximators.Approximator; +import uk.ac.ox.cs.pagoda.util.Timer; +import uk.ac.ox.cs.pagoda.util.Utility; + +import java.util.Collection; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.Set; + +public class LowerDatalogProgram extends ApproxProgram implements IncrementalProgram { + + boolean m_toClassify; + + public LowerDatalogProgram() { + m_toClassify = true; + } + + public LowerDatalogProgram(boolean toClassify) { + m_toClassify = toClassify; // false; // + } + + void clone(Program program) { + super.clone(program); + if (botStrategy instanceof UpperUnaryBottom) + botStrategy = new UnaryBottom(); + } + + + // TODO -RULE- filter out unsafe rules + @Override + public void transform() { + if (m_toClassify) { + ClassifyThread thread = new ClassifyThread(this); + thread.start(); + super.transform(); + try { + thread.join(5000); + } catch (InterruptedException e) { + return ; + } + if (!thread.isAlive()) thread.dispose(); + else thread.interrupt(); + } + else + super.transform(); + + Normalisation norm = new Normalisation(dlClauses, ontology, new NullaryBottom()); + BottomStrategy tBottom = new NullaryBottom(); + norm.process(); + for (DLClause nClause: norm.getNormlisedClauses()) { + if (nClause.getHeadLength() != 1) + for (DLClause newClause: RestrictedApplication.addAdditionalDatalogRules(nClause, tBottom, norm)) { +// System.out.println(newClause); + if (newClause.getHeadAtom(0).getDLPredicate() instanceof AtomicConcept || newClause.getHeadAtom(0).getDLPredicate() instanceof AtomicRole) { +// System.out.println(newClause); + clauses.add(newClause); + } + } + + if (nClause.getHeadLength() == 1 && (nClause.getHeadAtom(0).getDLPredicate() instanceof AtomicConcept || nClause.getHeadAtom(0).getDLPredicate() instanceof AtomicRole) && clauses.add(nClause)) { +// System.out.println(nClause); + } + } + } + + @Override + public String getOutputPath() { + return FilenameUtils.concat(getDirectory(), "lower.dlog"); + } + +// @Override +// public String getDirectory() { +// File dir = new File(ontologyDirectory + Utility.FILE_SEPARATOR + "datalog"); +// if (!dir.exists()) +// dir.mkdirs(); +// return dir.getPath(); +// } + + @Override + public void enrich(Collection delta) { + synchronized (clauses) { + Iterator iter = delta.iterator(); + while (iter.hasNext()) + clauses.add(iter.next()); + } + } + + @Override + public String toString() { + String text; + synchronized (clauses) { + text = super.toString(); + } + return text; + } + + @Override + protected void initApproximator() { + m_approx = new IgnoreBoth(); + } + + private class IgnoreBoth implements Approximator { + + @Override + public Collection convert(DLClause clause, DLClause originalClause) { + Collection ret = new LinkedList(); + + if (clause.getHeadLength() > 1) return ret; + + if (clause.getHeadLength() > 0) { + DLPredicate predicate = clause.getHeadAtom(0).getDLPredicate(); + + if (predicate instanceof AtLeast) return ret; + } + + ret.add(clause); + return ret; + } + } + +} + +class ClassifyThread extends Thread { + + IncrementalProgram m_program; + Collection clauses = new LinkedList(); + + Variable X = Variable.create("X"), Y = Variable.create("Y"); + 
Reasoner hermitReasoner; + OWLOntology ontology; + ClassifyThread(IncrementalProgram program) { + m_program = program; + } + + @Override + public void run() { + ontology = m_program.getOntology(); + try { + hermitReasoner = new Reasoner(ontology); + Timer t = new Timer(); + hermitReasoner.classifyClasses(); + Utility.logInfo("HermiT classification done: " + t.duration()); + } catch (Exception e) { +// e.printStackTrace(); + Utility.logInfo("HermiT cannot classify the ontology."); + hermitReasoner = null; + } + } + + public void dispose() { + if (hermitReasoner == null) + return ; + Set classes; + OWLClass lastClass = null, currentClass; + for (OWLClass subClass: ontology.getClassesInSignature()) { + Node node = hermitReasoner.getEquivalentClasses(subClass); + if (!subClass.equals(node.getRepresentativeElement())) continue; + + classes = node.getEntities(); + lastClass = subClass; + for (Iterator iter = classes.iterator(); iter.hasNext(); ) { + currentClass = iter.next(); + if (currentClass.equals(subClass)) continue; + addClause(lastClass, currentClass); + lastClass = currentClass; + } + addClause(lastClass, subClass); + + for (Node tNode: hermitReasoner.getSuperClasses(subClass, true)) { + OWLClass superClass = tNode.getRepresentativeElement(); + addClause(subClass, superClass); + } + } + + Set properties; + OWLObjectPropertyExpression lastProperty, currentProperty; + for (OWLObjectProperty subProperty: ontology.getObjectPropertiesInSignature()) { + Node node = hermitReasoner.getEquivalentObjectProperties(subProperty); + if (!subProperty.equals(node.getRepresentativeElement())) continue; + + properties = node.getEntities(); + lastProperty = subProperty; + for (Iterator iter = properties.iterator(); iter.hasNext(); ) { + currentProperty = iter.next(); + if (currentProperty.equals(subProperty)) continue; + addClause(lastProperty, currentProperty); + lastProperty = currentProperty; + } + addClause(lastProperty, subProperty); + + for (Node tNode: hermitReasoner.getSuperObjectProperties(subProperty, true)) { + OWLObjectPropertyExpression superProperty = tNode.getRepresentativeElement(); + addClause(subProperty, superProperty); + } + } + + m_program.enrich(clauses); + Utility.logInfo("classification done and enriched lower bound rules."); + } + + + private void addClause(OWLObjectPropertyExpression subProperty, OWLObjectPropertyExpression superProperty) { + if (subProperty.equals(superProperty)) return ; + if (superProperty.toString().equals("owl:topObjectProperty")) return ; + clauses.add(DLClause.create(new Atom[] { getAtom(superProperty) }, new Atom[] { getAtom(subProperty) })); + } + + private Atom getAtom(OWLObjectPropertyExpression p) { + if (p instanceof OWLObjectInverseOf) + return Atom.create(AtomicRole.create(((OWLObjectProperty) ((OWLObjectInverseOf) p).getInverse()).toStringID()), Y, X); + + return Atom.create(AtomicRole.create(((OWLObjectProperty) p).toStringID()), X, Y); + } + + private void addClause(OWLClass subClass, OWLClass superClass) { + if (subClass.equals(superClass)) return ; + if (subClass.toString().equals("owl:Nothing")) return ; + if (superClass.toString().equals("owl:Thing")) return ; + clauses.add(DLClause.create(new Atom[] { getAtom(superClass) }, new Atom[] { getAtom(subClass) })); + } + + private Atom getAtom(OWLClass c) { + return Atom.create(AtomicConcept.create(c.toStringID()), X); + } +} \ No newline at end of file diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/Program.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/Program.java new file mode 100644 index 
0000000..de06f52 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/Program.java @@ -0,0 +1,384 @@ +package uk.ac.ox.cs.pagoda.rules; + +import org.apache.commons.io.FilenameUtils; +import org.semanticweb.HermiT.Configuration; +import org.semanticweb.HermiT.model.*; +import org.semanticweb.HermiT.structural.OWLClausification; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.*; +import org.semanticweb.simpleETL.SimpleETL; +import uk.ac.ox.cs.pagoda.MyPrefixes; +import uk.ac.ox.cs.pagoda.approx.KnowledgeBase; +import uk.ac.ox.cs.pagoda.approx.RLPlusOntology; +import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; +import uk.ac.ox.cs.pagoda.constraints.NullaryBottom; +import uk.ac.ox.cs.pagoda.constraints.PredicateDependency; +import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; +import uk.ac.ox.cs.pagoda.hermit.RuleHelper; +import uk.ac.ox.cs.pagoda.owl.OWLHelper; +import uk.ac.ox.cs.pagoda.util.Utility; + +import java.io.*; +import java.util.*; + +public abstract class Program implements KnowledgeBase { + + protected String ontologyDirectory = null; + protected OWLOntology ontology; +// protected DLOntology dlOntology; + protected Set dlClauses = new HashSet<>(); + protected Set positiveFacts = new HashSet<>(); + protected BottomStrategy botStrategy; + protected Collection clauses = new HashSet(); +// protected Set used = new HashSet(); +protected PredicateDependency dependencyGraph; + protected LinkedList transitiveAxioms; + protected LinkedList transitiveClauses; + protected LinkedList subPropChainAxioms; + protected LinkedList subPropChainClauses; + private String additionalDataFile = null; + + public static String toString(Collection clauses) { + StringBuilder sb = new StringBuilder(DLClauseHelper.toString(clauses)); + sb.insert(0, MyPrefixes.PAGOdAPrefixes.prefixesText()); + return sb.toString(); + } + + public void load(InputStream rules, BottomStrategy botStrategy) { +// this.botStrategy = botStrategy; +// // fake instantiation + try { + load(OWLManager.createOWLOntologyManager().createOntology(), botStrategy); + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } + + try(BufferedReader br = new BufferedReader(new InputStreamReader(rules))) { + String line; + while((line = br.readLine()) != null) + dlClauses.add(RuleHelper.parseClause(line)); + } catch (IOException e) { + e.printStackTrace(); + } + } + + public void load(OWLOntology o, BottomStrategy botStrategy) { + this.botStrategy = botStrategy; + RLPlusOntology owlOntology = new RLPlusOntology(); + owlOntology.load(o, new NullaryBottom()); + owlOntology.simplify(); + + ontology = owlOntology.getTBox(); +// String ontologyPath = OWLHelper.getOntologyPath(ontology); +// ontologyDirectory = ontologyPath.substring(0, ontologyPath.lastIndexOf(Utility.JAVA_FILE_SEPARATOR)); + String ontologyPath = ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology).toURI().getPath(); + ontologyDirectory = FilenameUtils.getFullPath(ontologyPath); + clausify(); + + String aboxOWLFile = owlOntology.getABoxPath(); + OWLOntology abox = OWLHelper.loadOntology(aboxOWLFile); + OWLOntologyManager manager = abox.getOWLOntologyManager(); + OWLAxiom axiom; + for (Atom atom: positiveFacts) { + if ((axiom = OWLHelper.getABoxAssertion(manager.getOWLDataFactory(), atom)) != null) + manager.addAxiom(abox, axiom); + } + + try { + FileOutputStream out = new FileOutputStream(aboxOWLFile); + manager.saveOntology(abox, out); + out.close(); + } catch(IOException | OWLOntologyStorageException e) { + 
e.printStackTrace(); + System.exit(1); + } + + if (!abox.isEmpty()) { + SimpleETL rewriter = new SimpleETL(owlOntology.getOntologyIRI(), aboxOWLFile); + try { + rewriter.rewrite(); + } catch (Exception e) { + e.printStackTrace(); + } + additionalDataFile = rewriter.getExportedFile(); + new File(aboxOWLFile).delete(); + } + + } + + public String getAdditionalDataFile() { + return additionalDataFile; + } + + @Override + public void transform() { + for(DLClause dlClause : dlClauses) { + DLClause simplifiedDLClause = DLClauseHelper.removeNominalConcept(dlClause); + simplifiedDLClause = removeAuxiliaryBodyAtoms(simplifiedDLClause); + simplifiedDLClause = DLClauseHelper.replaceWithDataValue(simplifiedDLClause); + convert(simplifiedDLClause); + } + + addingTransitiveAxioms(); + addingSubPropertyChainAxioms(); + + Collection botRelated = new LinkedList(); + Variable X = Variable.create("X"); + botRelated.add(DLClause.create(new Atom[0], new Atom[]{Atom.create(Inequality.INSTANCE, X, X)})); + clauses.addAll(botStrategy.process(botRelated)); + + if(this instanceof GeneralProgram) + Utility.logDebug("The number of rules: " + (clauses.size() - 1)); + } + + @Override + public void save() { + try { + BufferedWriter ruleWriter = new BufferedWriter(new OutputStreamWriter( + new FileOutputStream(getOutputPath()))); + ruleWriter.write(toString()); + ruleWriter.close(); + } catch(IOException e) { + e.printStackTrace(); + } + Utility.logInfo("The rules are saved in " + getOutputPath() + "."); + } + + @Override + public String toString() { + return toString(clauses); + } + + public final void convert(DLClause clause) { + Collection tempClauses = convert2Clauses(clause); + clauses.addAll(tempClauses); + } + + public abstract Collection convert2Clauses(DLClause clause); + + public abstract String getOutputPath(); + + public OWLOntology getOntology() { + return ontology; + } + + public Collection getClauses() { + return clauses; + } + + public Collection getClauses(DLClause queryClause) { +// if (true) return new HashSet(clauses); + Set predicates = new HashSet(); + predicates.addAll(dependencyGraph.collectPredicate(queryClause.getBodyAtoms())); + + Set dependence = new HashSet(); + for(DLPredicate predicate : predicates) + dependence.addAll(dependencyGraph.getAncesters(predicate)); + + Collection relevantClauses = new LinkedList(); + for(DLClause clause : clauses) { + if(relevant(clause, dependence)) + relevantClauses.add(clause); + + } + return relevantClauses; + } + + public PredicateDependency buildDependencyGraph() { + if(dependencyGraph == null) + return dependencyGraph = new PredicateDependency(clauses); + else + return dependencyGraph; + } + + public void getDependencyGraph(PredicateDependency g) { + dependencyGraph = g; + } + + public final String getDirectory() { + return Utility.getGlobalTempDirAbsolutePath(); + } + + public void deleteABoxTurtleFile() { + if(additionalDataFile != null) + new File(additionalDataFile).delete(); + } + + /** + * clone all information of another program after load() + * + * @param program + */ + void clone(Program program) { + this.ontologyDirectory = program.ontologyDirectory; + this.ontology = program.ontology; +// this.dlOntology = program.dlOntology; + this.dlClauses = program.dlClauses; + this.positiveFacts = program.positiveFacts; + this.botStrategy = program.botStrategy; + this.additionalDataFile = program.additionalDataFile; + this.transitiveAxioms = program.transitiveAxioms; + this.transitiveClauses = program.transitiveClauses; + this.subPropChainAxioms = 
program.subPropChainAxioms; + this.subPropChainClauses = program.subPropChainClauses; + } + + private void clausify() { + Configuration conf = new Configuration(); + OWLClausification clausifier = new OWLClausification(conf); + OWLOntology filteredOntology = null; + OWLOntologyManager manager = ontology.getOWLOntologyManager(); + try { + filteredOntology = manager.createOntology(); + } catch(OWLOntologyCreationException e) { + e.printStackTrace(); + } + + transitiveAxioms = new LinkedList(); + subPropChainAxioms = new LinkedList(); + + OWLDatatype date = ontology.getOWLOntologyManager() + .getOWLDataFactory() + .getOWLDatatype(IRI.create("http://www.w3.org/2001/XMLSchema#date")); + int noOfDataPropertyRangeAxioms = 0, noOfAxioms = 0; + for(OWLOntology onto : ontology.getImportsClosure()) + for(OWLAxiom axiom : onto.getAxioms()) { + if(axiom instanceof OWLTransitiveObjectPropertyAxiom) + transitiveAxioms.add((OWLTransitiveObjectPropertyAxiom) axiom); + else if(axiom instanceof OWLSubPropertyChainOfAxiom) + subPropChainAxioms.add((OWLSubPropertyChainOfAxiom) axiom); + // TODO to filter out datatype axioms + else if(axiom instanceof OWLDataPropertyRangeAxiom) { + ++noOfDataPropertyRangeAxioms; + Utility.logInfo("The axiom: " + axiom + " is being ignored."); + } + else { + if(axiom.getDatatypesInSignature().contains(date)) { + Utility.logInfo("The axiom: " + axiom + " is being ignored."); + } + else manager.addAxiom(filteredOntology, axiom); + } + + if(axiom instanceof OWLAnnotationAssertionAxiom || + axiom instanceof OWLSubAnnotationPropertyOfAxiom || + axiom instanceof OWLDeclarationAxiom || + axiom instanceof OWLDataPropertyRangeAxiom) { + } + else { +// System.out.println(axiom); + ++noOfAxioms; + } + + } + Utility.logInfo("The number of data property range axioms that are ignored: " + noOfDataPropertyRangeAxioms + "(" + noOfAxioms + ")"); + + DLOntology dlOntology = (DLOntology) clausifier.preprocessAndClausify(filteredOntology, null)[1]; + dlClauses = dlOntology.getDLClauses(); + positiveFacts = dlOntology.getPositiveFacts(); + } + + private DLClause removeAuxiliaryBodyAtoms(DLClause dlClause) { + Collection newBodyAtoms = new LinkedList(); + DLPredicate p; + for(Atom bodyAtom : dlClause.getBodyAtoms()) { + p = bodyAtom.getDLPredicate(); + if(p instanceof AtomicConcept || + p instanceof AtomicRole || p instanceof InverseRole || + p instanceof Equality || p instanceof AnnotatedEquality || p instanceof Inequality) + newBodyAtoms.add(bodyAtom); + } + LinkedList newHeadAtoms = new LinkedList(); + Map assign = new HashMap(); + for(Atom headAtom : dlClause.getHeadAtoms()) { + p = headAtom.getDLPredicate(); + if(p instanceof AtomicNegationDataRange) { + AtomicDataRange positive = ((AtomicNegationDataRange) p).getNegatedDataRange(); + if(!(positive instanceof ConstantEnumeration)) + newBodyAtoms.add(Atom.create(positive, headAtom.getArgument(0))); + else if(((ConstantEnumeration) positive).getNumberOfConstants() == 1) { + assign.put((Variable) headAtom.getArgument(0), ((ConstantEnumeration) positive).getConstant(0)); +// newBodyAtoms.add(Atom.create(Equality.INSTANCE, headAtom.getArgument(0), ((ConstantEnumeration) positive).getConstant(0))); + } + else newHeadAtoms.add(headAtom); + } + else + newHeadAtoms.add(headAtom); + } + + if(assign.isEmpty() && newHeadAtoms.isEmpty() && newBodyAtoms.size() == dlClause.getBodyLength()) + return dlClause; + + Atom[] headArray = + newHeadAtoms.size() == dlClause.getHeadLength() ? 
dlClause.getHeadAtoms() : newHeadAtoms.toArray(new Atom[0]); + Atom[] bodyArray = + newBodyAtoms.size() == dlClause.getBodyLength() ? dlClause.getBodyAtoms() : newBodyAtoms.toArray(new Atom[0]); + if(!assign.isEmpty()) { + for(int i = 0; i < headArray.length; ++i) + headArray[i] = DLClauseHelper.getInstance(headArray[i], assign); + for(int i = 0; i < bodyArray.length; ++i) + bodyArray[i] = DLClauseHelper.getInstance(bodyArray[i], assign); + } + return DLClause.create(headArray, bodyArray); + } + + private void addingTransitiveAxioms() { + DLClause transitiveClause; + Atom headAtom; + Variable X = Variable.create("X"), Y = Variable.create("Y"), Z = Variable.create("Z"); + transitiveClauses = new LinkedList(); + for(OWLTransitiveObjectPropertyAxiom axiom : transitiveAxioms) { + OWLObjectPropertyExpression objExp = axiom.getProperty(); + headAtom = getAtom(objExp, X, Z); + Atom[] bodyAtoms = new Atom[2]; + bodyAtoms[0] = getAtom(objExp, X, Y); + bodyAtoms[1] = getAtom(objExp, Y, Z); + transitiveClause = DLClause.create(new Atom[]{headAtom}, bodyAtoms); + clauses.add(transitiveClause); + transitiveClauses.add(transitiveClause); + } + } + + private Atom getAtom(OWLObjectPropertyExpression exp, Variable x, Variable y) { + if(exp instanceof OWLObjectProperty) + return Atom.create(AtomicRole.create(((OWLObjectProperty) exp).toStringID()), x, y); + // TODO fixed, test it + OWLObjectPropertyExpression inverseOf; + if(exp instanceof OWLObjectInverseOf && (inverseOf = ( + (OWLObjectInverseOf) exp).getInverse()) instanceof OWLObjectProperty) + return Atom.create(AtomicRole.create(((OWLObjectProperty) inverseOf).toStringID()), x, y); + return null; + } + + private void addingSubPropertyChainAxioms() { + DLClause dlClause; + subPropChainClauses = new LinkedList(); + Atom headAtom; + Iterator iterExp; + OWLObjectPropertyExpression objExp; + for(OWLSubPropertyChainOfAxiom axiom : subPropChainAxioms) { + objExp = axiom.getSuperProperty(); + List objs = axiom.getPropertyChain(); + headAtom = getAtom(objExp, Variable.create("X"), Variable.create("X" + objs.size())); + iterExp = objs.iterator(); + int index = 1; + Atom[] bodyAtoms = new Atom[objs.size()]; + bodyAtoms[0] = getAtom(iterExp.next(), Variable.create("X"), Variable.create("X1")); + while(index < objs.size()) { + bodyAtoms[index] = + getAtom(iterExp.next(), Variable.create("X" + index), Variable.create("X" + (index + 1))); + ++index; + } + dlClause = DLClause.create(new Atom[]{headAtom}, bodyAtoms); + clauses.add(dlClause); + subPropChainClauses.add(dlClause); + } + } + + private boolean relevant(DLClause clause, Set set) { + for(DLPredicate p : dependencyGraph.collectPredicate(clause.getHeadAtoms())) + if(set.contains(p)) + return true; + return false; + } + +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/UpperDatalogProgram.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/UpperDatalogProgram.java new file mode 100644 index 0000000..611e183 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/UpperDatalogProgram.java @@ -0,0 +1,47 @@ +package uk.ac.ox.cs.pagoda.rules; + +import org.semanticweb.HermiT.model.DLClause; +import org.semanticweb.HermiT.model.DLPredicate; +import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxBoth; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + + +public class UpperDatalogProgram extends UpperProgram { + + public UpperDatalogProgram() {} + +// @Override +// public String getDirectory() { +// File dir = new File(ontologyDirectory + Utility.FILE_SEPARATOR + "datalog"); +// if 
(!dir.exists()) +// dir.mkdirs(); +// return dir.getPath(); +// } + + @Override + protected void initApproximator() { + m_approx = new OverApproxBoth(); + } + + public int getBottomNumber() { + return botStrategy.getBottomNumber(); + } + + public void updateDependencyGraph(Collection delta) { + Map map = new HashMap(); + for (DLClause clause: clauses) + if (botStrategy.isBottomRule(clause)) + map.put(clause.getHeadAtom(0).getDLPredicate(), getCorrespondingClause(clause)); + + for (DLClause clause: delta) { + clauses.add(clause); + correspondence.put(clause, map.get(clause.getBodyAtom(0).getDLPredicate())); + } + + dependencyGraph.update(delta); + } + +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/UpperProgram.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/UpperProgram.java new file mode 100644 index 0000000..52d60b7 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/UpperProgram.java @@ -0,0 +1,12 @@ +package uk.ac.ox.cs.pagoda.rules; + +import org.apache.commons.io.FilenameUtils; + +public abstract class UpperProgram extends ApproxProgram { + + @Override + public String getOutputPath() { + return FilenameUtils.concat(getDirectory(), "upper.dlog"); + } + +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/Approximator.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/Approximator.java new file mode 100644 index 0000000..f910c64 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/Approximator.java @@ -0,0 +1,42 @@ +package uk.ac.ox.cs.pagoda.rules.approximators; + +import org.semanticweb.HermiT.model.DLClause; + +import java.util.Collection; + +public interface Approximator { + + Collection convert(DLClause clause, DLClause originalClause); + +} + +// TODO remove +//class IgnoreExist implements Approximator { +// +// @Override +// public Collection convert(DLClause clause, DLClause originalClause) { +// Collection ret = new LinkedList(); +// DLPredicate p; +// for (Atom headAtom: clause.getHeadAtoms()) { +// p = headAtom.getDLPredicate(); +// if (p instanceof AtLeast) return ret; +// } +// +// ret.add(clause); +// return ret; +// } +// +//} +// +// +// +//class IgnoreDisj implements Approximator { +// +// @Override +// public Collection convert(DLClause clause, DLClause originalClause) { +// Collection ret = new LinkedList(); +// if (clause.getHeadLength() > 1) return ret; +// ret.add(clause); +// return ret; +// } +//} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java new file mode 100644 index 0000000..a140225 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java @@ -0,0 +1,146 @@ +package uk.ac.ox.cs.pagoda.rules.approximators; + +import org.semanticweb.HermiT.model.*; +import uk.ac.ox.cs.pagoda.multistage.MultiStageUpperProgram; +import uk.ac.ox.cs.pagoda.util.tuples.Tuple; +import uk.ac.ox.cs.pagoda.util.tuples.TupleBuilder; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; + +/*** + * Approximates existential rules through a limited form of Skolemisation. + *

+ * Given a rule and a ground substitution, + * it Skolemises the rule + * if all the terms in the substitution have depth less than a given depth, + * otherwise it approximates using an alternative TupleDependentApproximator. + */ +public class LimitedSkolemisationApproximator implements TupleDependentApproximator { + + private static final Atom[] EMPTY_BODY = new Atom[0]; + private static final Variable X = Variable.create("X"); + private final int maxTermDepth; + private final SkolemTermsManager skolemTermsManager; + + public LimitedSkolemisationApproximator(int maxTermDepth) { + this.maxTermDepth = maxTermDepth; + this.skolemTermsManager = SkolemTermsManager.getInstance(); + } + + @Override + public Collection convert(DLClause clause, + DLClause originalClause, + Collection> violationTuples) { + switch(clause.getHeadLength()) { + case 1: + return overApprox(clause, originalClause, violationTuples); + case 0: + return Collections.singletonList(clause); + default: + throw new IllegalArgumentException( + "Expected clause with head length < 1, but it is " + clause.getHeadLength()); + } + + + } + + public int getMaxDepth(Tuple violationTuple) { + int maxDepth = 0; + for(Individual individual : violationTuple) + maxDepth = Integer.max(maxDepth, skolemTermsManager.getDepthOf(individual)); + + return maxDepth; + } + + private Collection overApprox(DLClause clause, DLClause originalClause, Collection> violationTuples) { + ArrayList result = new ArrayList<>(); + for(Tuple violationTuple : violationTuples) { + result.addAll(getGroundSkolemisation(clause, + originalClause, violationTuple, getMaxDepth(violationTuple) >= maxTermDepth)); + } + + return result; + } + + private Collection getGroundSkolemisation(DLClause clause, + DLClause originalClause, + Tuple violationTuple, + boolean useClauseUniqueIndividual) { + + String[] commonVars = MultiStageUpperProgram.getCommonVars(clause); + + // TODO check: strong assumption, the first tuples are the common ones + TupleBuilder commonIndividualsBuilder = new TupleBuilder<>(); + for(int i = 0; i < commonVars.length; i++) + commonIndividualsBuilder.append(violationTuple.get(i)); + Tuple commonIndividuals = commonIndividualsBuilder.build(); + + Atom headAtom = clause.getHeadAtom(0); + +// Atom[] bodyAtoms = clause.getBodyAtoms(); + int offset = OverApproxExist.indexOfExistential(headAtom, originalClause); + + // BEGIN: copy and paste + ArrayList ret = new ArrayList<>(); + DLPredicate predicate = headAtom.getDLPredicate(); + if(predicate instanceof AtLeastConcept) { + AtLeastConcept atLeastConcept = (AtLeastConcept) predicate; + LiteralConcept concept = atLeastConcept.getToConcept(); + Role role = atLeastConcept.getOnRole(); + AtomicConcept atomicConcept; + + if(concept instanceof AtomicNegationConcept) { + Atom atom1 = + Atom.create(atomicConcept = ((AtomicNegationConcept) concept).getNegatedAtomicConcept(), X); + Atom atom2 = Atom.create(atomicConcept = OverApproxExist.getNegationConcept(atomicConcept), X); + ret.add(DLClause.create(new Atom[0], new Atom[]{atom1, atom2})); + } + else { + atomicConcept = (AtomicConcept) concept; + if(atomicConcept.equals(AtomicConcept.THING)) + atomicConcept = null; + } + + int card = atLeastConcept.getNumber(); + Individual[] individuals = new Individual[card]; + SkolemTermsManager termsManager = SkolemTermsManager.getInstance(); + for(int i = 0; i < card; ++i) + if(useClauseUniqueIndividual) + individuals[i] = termsManager.getFreshIndividual(originalClause, + offset + i, + maxTermDepth + 1); + else + individuals[i] = 
termsManager.getFreshIndividual(originalClause, + offset + i, + commonIndividuals); + + for(int i = 0; i < card; ++i) { + if(atomicConcept != null) + ret.add(DLClause.create(new Atom[]{Atom.create(atomicConcept, individuals[i])}, EMPTY_BODY)); + + Atom atom = role instanceof AtomicRole ? + Atom.create((AtomicRole) role, commonIndividuals.get(0), individuals[i]) : + Atom.create(((InverseRole) role).getInverseOf(), individuals[i], commonIndividuals.get(0)); + + ret.add(DLClause.create(new Atom[]{atom}, EMPTY_BODY)); + } + + for(int i = 0; i < card; ++i) + for(int j = i + 1; j < card; ++j) + // TODO to be checked ... different as + ret.add(DLClause.create(new Atom[]{Atom.create(Inequality.INSTANCE, individuals[i], individuals[j])}, EMPTY_BODY)); + + } + else if(predicate instanceof AtLeastDataRange) { + // TODO to be implemented ... + } + else + ret.add(DLClause.create(new Atom[]{headAtom}, EMPTY_BODY)); + + return ret; + + // END: copy and paste + } +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxBoth.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxBoth.java new file mode 100644 index 0000000..ae2a2cf --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxBoth.java @@ -0,0 +1,24 @@ +package uk.ac.ox.cs.pagoda.rules.approximators; + +import org.semanticweb.HermiT.model.AtLeastDataRange; +import org.semanticweb.HermiT.model.DLClause; + +import java.util.Collection; +import java.util.LinkedList; + +public class OverApproxBoth implements Approximator { + + Approximator approxDist = new OverApproxDisj(), approxExist = new OverApproxExist(); + + @Override + public Collection convert(DLClause clause, DLClause originalClause) { + Collection ret = new LinkedList(); + for (DLClause tClause: approxDist.convert(clause, originalClause)) { + if (tClause.getHeadLength() > 0 && tClause.getHeadAtom(0).getDLPredicate() instanceof AtLeastDataRange) + continue; + ret.addAll(approxExist.convert(tClause, originalClause)); + } + return ret; + } + +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxDisj.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxDisj.java new file mode 100644 index 0000000..05d9442 --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxDisj.java @@ -0,0 +1,100 @@ +package uk.ac.ox.cs.pagoda.rules.approximators; + +import org.semanticweb.HermiT.model.*; +import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; + +import java.util.*; + +public class OverApproxDisj implements Approximator { + + /** + * Splits a disjunctive rule into a bunch of rules. + *

+ * It returns a collection containing a rule for each atom in the head of the input rule. + * Each rule has the same body of the input rule, + * and the relative head atom as head. + * */ + @Override + public Collection convert(DLClause clause, DLClause originalClause) { + LinkedList distincts = new LinkedList(); + Atom[] headAtoms = clause.getHeadAtoms(), bodyAtoms = clause.getBodyAtoms(); + LinkedList newClauses = new LinkedList(); + DLClause newClause; + if (headAtoms.length > 1) { + for (Atom headAtom: headAtoms) { + newClause = DLClause.create(new Atom[] {headAtom}, bodyAtoms); + newClauses.add(newClause); +// distincts.add(newClause); + } + + for (DLClause cls: newClauses) { + newClause = DLClauseHelper.simplified(cls); + if (!isSubsumedBy(newClause, distincts)) + distincts.add(newClause); + } + } + else distincts.add(clause); + + return distincts; + } + + public static boolean isSubsumedBy(DLClause newClause, Collection distinctClauses) { + Map unifier; + Set bodyAtoms = new HashSet(); + for (DLClause clause: distinctClauses) { + if (newClause.getHeadLength() > 0 && clause.getHeadLength() > 0 && + (unifier = isSubsumedBy(newClause.getHeadAtom(0), clause.getHeadAtom(0))) == null) + continue; + else + unifier = new HashMap(); + + for (Atom atom: clause.getBodyAtoms()) + bodyAtoms.add(rename(atom, unifier)); + unifier.clear(); + + for (Atom atom: newClause.getBodyAtoms()) + if (!bodyAtoms.contains(atom)) + continue; + + return true; + } + + return false; + } + + public static Map isSubsumedBy(Atom atom1, Atom atom2) { + DLPredicate predicate = atom1.getDLPredicate(); + if (!predicate.equals(atom2.getDLPredicate())) + return null; + + Map unifier = new HashMap(); + Term term1, term2; + for (int index = 0; index < predicate.getArity(); ++index) { + term1 = rename(atom1.getArgument(index), unifier); + term2 = rename(atom2.getArgument(index), unifier); + + if (term1.equals(term2)); + else if (term1 instanceof Variable) + unifier.put((Variable) term1, term2); + else + return null; + } + return unifier; + } + + public static Atom rename(Atom atom, Map unifier) { + Term[] arguments = new Term[atom.getArity()]; + for (int i = 0; i < atom.getArity(); ++i) + arguments[i] = rename(atom.getArgument(i), unifier); + return Atom.create(atom.getDLPredicate(), arguments); + } + + public static Term rename(Term argument, Map unifier) { + Term newArg; + while ((newArg = unifier.get(argument)) != null) + return newArg; + return argument; + } + + +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxExist.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxExist.java new file mode 100644 index 0000000..028568c --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxExist.java @@ -0,0 +1,224 @@ +package uk.ac.ox.cs.pagoda.rules.approximators; + +import org.semanticweb.HermiT.model.*; +import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; + +import java.util.Collection; +import java.util.Iterator; +import java.util.LinkedList; + +public class OverApproxExist implements Approximator { + + public static final String negativeSuffix = "_neg"; + private static final Variable X = Variable.create("X"); + + static int indexOfExistential(Atom headAtom, DLClause originalClause) { + if (!(headAtom.getDLPredicate() instanceof AtLeast)) return -1; + AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate(); + if (alc.getToConcept() instanceof AtomicConcept) { + AtomicConcept ac = (AtomicConcept) alc.getToConcept(); + if 
(ac.getIRI().endsWith(negativeSuffix)) { + alc = AtLeastConcept.create(alc.getNumber(), alc.getOnRole(), AtomicNegationConcept.create(getNegationConcept(ac))); + headAtom = Atom.create(alc, headAtom.getArgument(0)); + } + } + + int index = 0; + for (Atom atom: originalClause.getHeadAtoms()) { + if (atom.equals(headAtom)) + return index; + if (atom.getDLPredicate() instanceof AtLeast) + index += ((AtLeast) atom.getDLPredicate()).getNumber(); + } + return -1; + } + + public static AtomicConcept getNegationConcept(DLPredicate p) { + if (p.equals(AtomicConcept.THING)) return AtomicConcept.NOTHING; + if (p.equals(AtomicConcept.NOTHING)) return AtomicConcept.THING; + + if (p instanceof AtomicConcept) { + String iri = ((AtomicConcept) p).getIRI(); + if (iri.endsWith(negativeSuffix)) + iri = iri.substring(0, iri.length() - 4); + else + iri += negativeSuffix; + + return AtomicConcept.create(iri); + } + if (p instanceof AtLeastConcept) { + // FIXME !!! here + return null; + } + return null; + } + + @Override + public Collection convert(DLClause clause, DLClause originalClause) { + Collection ret; + switch (clause.getHeadLength()) { + case 1: + return overApprox(clause.getHeadAtom(0), clause.getBodyAtoms(), originalClause); + case 0: + ret = new LinkedList(); + ret.add(clause); + return ret; + default: + ret = new LinkedList(); + for (Iterator iter = new DisjunctiveHeads(clause, originalClause); iter.hasNext(); ) + ret.add(iter.next()); + return ret; + } + } + + public Collection overApprox(Atom headAtom, Atom[] bodyAtoms, DLClause originalClause) { + return overApprox(headAtom, bodyAtoms, originalClause, indexOfExistential(headAtom, originalClause)); + } + + public Collection overApprox(Atom headAtom, Atom[] bodyAtoms, DLClause originalClause, int offset) { + Collection ret = new LinkedList(); + DLPredicate predicate = headAtom.getDLPredicate(); + if (predicate instanceof AtLeastConcept) { + AtLeastConcept atLeastConcept = (AtLeastConcept) predicate; + LiteralConcept concept = atLeastConcept.getToConcept(); + Role role = atLeastConcept.getOnRole(); + AtomicConcept atomicConcept = null; + + if (concept instanceof AtomicNegationConcept) { + // TODO CHECK: is this already in MultiStageUpperProgram? + Atom atom1 = Atom.create(atomicConcept = ((AtomicNegationConcept) concept).getNegatedAtomicConcept(), X); + Atom atom2 = Atom.create(atomicConcept = getNegationConcept(atomicConcept), X); + ret.add(DLClause.create(new Atom[0], new Atom[] {atom1, atom2})); + } + else { + atomicConcept = (AtomicConcept) concept; + if (atomicConcept.equals(AtomicConcept.THING)) + atomicConcept = null; + } + + int card = atLeastConcept.getNumber(); + Individual[] individuals = new Individual[card]; + SkolemTermsManager termsManager = SkolemTermsManager.getInstance(); + for (int i = 0; i < card; ++i) individuals[i] = termsManager.getFreshIndividual(originalClause, offset + i); + + for (int i = 0; i < card; ++i) { + if (atomicConcept != null) + ret.add(DLClause.create(new Atom[] {Atom.create(atomicConcept, individuals[i])}, bodyAtoms)); + + Atom atom = role instanceof AtomicRole ? + Atom.create((AtomicRole) role, X, individuals[i]) : + Atom.create(((InverseRole) role).getInverseOf(), individuals[i], X); + + ret.add(DLClause.create(new Atom[] {atom}, bodyAtoms)); + } + + for (int i = 0; i < card; ++i) + for (int j = i + 1; j < card; ++j) + // TODO to be checked ... 
different as + ret.add(DLClause.create(new Atom[] {Atom.create(Inequality.INSTANCE, individuals[i], individuals[j])}, bodyAtoms)); + //DLClauseHelper.contructor_differentAs(individuals[i], individuals[j])); + + } + else if (predicate instanceof AtLeastDataRange) { + // TODO to be implemented ... + } + else + ret.add(DLClause.create(new Atom[] {headAtom}, bodyAtoms)); + + return ret; + } + + class DisjunctiveHeads implements Iterator { + + Atom[] bodyAtoms; + Atom[][] disjunctHeadAtoms; + int[] pointer; + int length, l; + LinkedList auxiliaryClauses = new LinkedList(); + + public DisjunctiveHeads(DLClause clause, DLClause originalClause) { + length = clause.getHeadLength(); + + bodyAtoms = clause.getBodyAtoms(); + disjunctHeadAtoms = new Atom[length][]; + pointer = new int[length]; + if (length > 0) l = length - 1; + else length = 0; + + int index = 0, offset = 0; + Collection datalogRules; + DLClause newClause; + for (Atom headAtom: clause.getHeadAtoms()) { + pointer[index] = 0; + + datalogRules = overApprox(headAtom, bodyAtoms, originalClause, offset); + + if (datalogRules.isEmpty()) { + l = -1; + auxiliaryClauses.clear(); + return ; + } + + for (Iterator iter = datalogRules.iterator(); iter.hasNext(); ) { + newClause = iter.next(); + if (!DLClauseHelper.hasSubsetBodyAtoms(newClause, clause)) { + auxiliaryClauses.add(newClause); + iter.remove(); + } + } + + disjunctHeadAtoms[index] = new Atom[datalogRules.size()]; + + int j = 0; + for (DLClause disjunct: datalogRules) { + disjunctHeadAtoms[index][j++] = disjunct.getHeadAtom(0); + } + + ++index; + if (headAtom.getDLPredicate() instanceof AtLeast) + offset += ((AtLeast) headAtom.getDLPredicate()).getNumber(); + + } + + } + + @Override + public boolean hasNext() { + return l != -1 || !auxiliaryClauses.isEmpty(); + } + + @Override + public DLClause next() { + if (l == -1) + return auxiliaryClauses.removeFirst(); + + if (length > 0) { + Atom[] headAtoms = new Atom[length]; + for (int i = 0; i < length; ++i) + headAtoms[i] = disjunctHeadAtoms[i][pointer[i]]; + + ++pointer[l]; + while (l >= 0 && pointer[l] >= disjunctHeadAtoms[l].length) { + pointer[l] = 0; + --l; + if (l >= 0) + ++pointer[l]; + } + + if (l >= 0) l = length - 1; + + return DLClauseHelper.simplified(DLClause.create(headAtoms, bodyAtoms)); +// return DLClause.create(headAtoms, bodyAtoms); + } + else { + --l; + return DLClauseHelper.simplified(DLClause.create(new Atom[0], bodyAtoms)); +// return DLClause.create(new Atom[0], bodyAtoms); + } + } + + @Override + public void remove() { } + + } +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java new file mode 100644 index 0000000..ed93d0e --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java @@ -0,0 +1,139 @@ +package uk.ac.ox.cs.pagoda.rules.approximators; + +import org.semanticweb.HermiT.model.*; +import uk.ac.ox.cs.pagoda.util.Namespace; +import uk.ac.ox.cs.pagoda.util.tuples.Tuple; + +import java.util.HashMap; +import java.util.Map; + +/** + * If you need a Skolem term (i.e. fresh individual), ask this class. 
+ */ +public class SkolemTermsManager { + + public static final String SKOLEMISED_INDIVIDUAL_PREFIX = Namespace.PAGODA_ANONY + "individual"; + public static final String RULE_UNIQUE_SKOLEMISED_INDIVIDUAL_PREFIX = SKOLEMISED_INDIVIDUAL_PREFIX + "_unique"; + + private static SkolemTermsManager skolemTermsManager; + + private int termsCounter = 0; + private Map clauseToId_map = new HashMap<>(); + private Map individualToDepth_map = new HashMap<>(); + private int dependenciesCounter = 0; + + private Map, Integer> dependencyToId_map = new HashMap<>(); + + private SkolemTermsManager() { + } + + public static int indexOfSkolemisedIndividual(Atom atom) { + Term t; + for(int index = 0; index < atom.getArity(); ++index) { + t = atom.getArgument(index); + if(t instanceof Individual && ((Individual) t).getIRI().contains(SKOLEMISED_INDIVIDUAL_PREFIX)) + return index; + } + return -1; + } + + /** + * Returns the existing unique SkolemTermsManager or a new one. + *
+ * Indeed the SkolemTermsManager is a singleton. + */ + public static SkolemTermsManager getInstance() { + if(skolemTermsManager == null) skolemTermsManager = new SkolemTermsManager(); + return skolemTermsManager; + } + + /** + * Get a fresh Individual, unique for the clause, the offset and the dependency. + */ + public Individual getFreshIndividual(DLClause originalClause, int offset, Tuple dependency) { + String termId = Integer.toString(mapClauseToId(originalClause) + offset) + + "_" + mapDependencyToId(dependency); + Individual newIndividual = Individual.create(SKOLEMISED_INDIVIDUAL_PREFIX + termId); + + if(!individualToDepth_map.containsKey(newIndividual)) { + int depth = 0; + for (Individual individual : dependency) + depth = Integer.max(depth, getDepthOf(individual)); + individualToDepth_map.put(newIndividual, depth + 1); + } + + return newIndividual; + } + + /*** + * Create a term of a given depth, unique for the clause and the depth. + * + * @param originalClause + * @param offset + * @param depth + * @return + */ + public Individual getFreshIndividual(DLClause originalClause, int offset, int depth) { + String termId = Integer.toString(mapClauseToId(originalClause) + offset) + "_depth_" + depth; + Individual newIndividual = Individual.create(RULE_UNIQUE_SKOLEMISED_INDIVIDUAL_PREFIX + termId); + + individualToDepth_map.putIfAbsent(newIndividual, depth); + + return newIndividual; + } + + /** + * Get a fresh Individual, unique for the clause and the offset. + */ + public Individual getFreshIndividual(DLClause originalClause, int offset) { + String termId = Integer.toString(mapClauseToId(originalClause) + offset); + return Individual.create(SKOLEMISED_INDIVIDUAL_PREFIX + termId); + } + + /** + * Get the depth of a term. + *
+ * The term must have been generated by this manager. + */ + public int getDepthOf(Individual individual) { + if(individualToDepth_map.containsKey(individual)) return individualToDepth_map.get(individual); + else return 0; + } + + /** + * Get the number of individuals generated by this manager. + */ + public int getSkolemIndividualsCount() { + return individualToDepth_map.keySet().size(); + } + + /** + * Just for reading the clause id from LimitedSkolemisationApproximator. + */ + int getClauseId(DLClause clause) { + return clauseToId_map.get(clause); + } + + private int mapClauseToId(DLClause clause) { + if(!clauseToId_map.containsKey(clause)) { + clauseToId_map.put(clause, termsCounter); + termsCounter += noOfExistential(clause); + } + return clauseToId_map.get(clause); + } + + private int mapDependencyToId(Tuple dependency) { + if(!dependencyToId_map.containsKey(dependency)) + dependencyToId_map.put(dependency, dependenciesCounter++); + return dependencyToId_map.get(dependency); + } + + private int noOfExistential(DLClause originalClause) { + int no = 0; + for(Atom atom : originalClause.getHeadAtoms()) + if(atom.getDLPredicate() instanceof AtLeast) + no += ((AtLeast) atom.getDLPredicate()).getNumber(); + return no; + } + +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/TupleDependentApproximator.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/TupleDependentApproximator.java new file mode 100644 index 0000000..c99a1ad --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/approximators/TupleDependentApproximator.java @@ -0,0 +1,19 @@ +package uk.ac.ox.cs.pagoda.rules.approximators; + +import org.semanticweb.HermiT.model.DLClause; +import org.semanticweb.HermiT.model.Individual; +import uk.ac.ox.cs.pagoda.util.tuples.Tuple; + +import java.util.Collection; + +/** + * It can approximate clauses according to a collection of tuples of individuals. + *
+ * In particular it can be used to approximate rules given some body instantiations. + */ +public interface TupleDependentApproximator { + + Collection convert(DLClause clause, + DLClause originalClause, + Collection> individualsTuples); +} diff --git a/src/main/java/uk/ac/ox/cs/pagoda/rules/clauses/Clause.java b/src/main/java/uk/ac/ox/cs/pagoda/rules/clauses/Clause.java new file mode 100644 index 0000000..2adb66b --- /dev/null +++ b/src/main/java/uk/ac/ox/cs/pagoda/rules/clauses/Clause.java @@ -0,0 +1,100 @@ +package uk.ac.ox.cs.pagoda.rules.clauses; + +public class Clause { + +// public static final String IF = ":-"; +// public static final String OR = "|"; +// public static final String AND = ","; +// +// protected final List> head; +// protected final List body; +// +// protected Clause(Atom[] headAtoms, Atom[] bodyAtoms) { +// this.head = Collections.singletonList(Arrays.asList(headAtoms)); +// this.body= Arrays.asList(bodyAtoms); +// } +// +// protected Clause(String s) { +// this.headAtoms = null; +// this.bodyAtoms = null; +// } +// +// public int getHeadLength() { +// return headAtoms.length; +// } +// +// public Atom getHeadAtom(int atomIndex) { +// return headAtoms[atomIndex]; +// } +// +// public Atom[] getHeadAtoms() { +// return headAtoms.clone(); +// } +// +// public int getBodyLength() { +// return bodyAtoms.length; +// } +// +// public Atom getBodyAtom(int atomIndex) { +// return bodyAtoms[atomIndex]; +// } +// +// public Atom[] getBodyAtoms() { +// return bodyAtoms.clone(); +// } +// +// public String toString(Prefixes prefixes) { +// StringBuilder buffer = new StringBuilder(); +// for(int headIndex = 0; headIndex < headAtoms.length; headIndex++) { +// if(headIndex != 0) +// buffer.append(" ").append(OR).append(" "); +// buffer.append(headAtoms[headIndex].toString(prefixes)); +// } +// buffer.append(" ").append(IF).append(" "); +// for(int bodyIndex = 0; bodyIndex < bodyAtoms.length; bodyIndex++) { +// if(bodyIndex != 0) +// buffer.append(AND).append(" "); +// buffer.append(bodyAtoms[bodyIndex].toString(prefixes)); +// } +// return buffer.toString(); +// } +// +// public String toString() { +// return toString(Prefixes.STANDARD_PREFIXES); +// } +// +// protected static InterningManager s_interningManager = new InterningManager() { +// protected boolean equal(Clause object1, Clause object2) { +// if(object1.head.length != object2.headAtoms.length +// || object1.bodyAtoms.length != object2.bodyAtoms.length) +// return false; +// for(int index = object1.headAtoms.length - 1; index >= 0; --index) +// if(object1.headAtoms[index] != object2.headAtoms[index]) +// return false; +// for(int index = object1.bodyAtoms.length - 1; index >= 0; --index) +// if(object1.bodyAtoms[index] != object2.bodyAtoms[index]) +// return false; +// return true; +// } +// +// protected int getHashCode(Clause object) { +// int hashCode = 0; +// for(int index = object.bodyAtoms.length - 1; index >= 0; --index) +// hashCode += object.bodyAtoms[index].hashCode(); +// for(int index = object.headAtoms.length - 1; index >= 0; --index) +// hashCode += object.headAtoms[index].hashCode(); +// return hashCode; +// } +// }; +// +// /** +// * Creates a clause from a string. +// * +// * @param s +// * @return +// */ +// public static Clause create(String s) { +// return s_interningManager.intern(new Clause(s)); +// } + +} -- cgit v1.2.3
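The OverApproxExist approximator introduced in this patch rewrites a clause with an at-least (existential) head into plain datalog: the existential witness is replaced by a Skolem constant requested from SkolemTermsManager. Below is a minimal, illustrative usage sketch of that API; the example IRIs (http://example.org/...) and the class name OverApproxExistDemo are assumptions made for this sketch and do not appear in the committed sources.

    // Illustrative only: over-approximate  A(X) -> exists R.B(X)  into datalog.
    // The IRIs and the demo class name are assumptions, not part of the patch.
    import org.semanticweb.HermiT.model.*;
    import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
    import java.util.Collection;

    public class OverApproxExistDemo {
        public static void main(String[] args) {
            Variable x = Variable.create("X");
            AtomicConcept a = AtomicConcept.create("http://example.org/A");
            AtomicConcept b = AtomicConcept.create("http://example.org/B");
            AtomicRole r = AtomicRole.create("http://example.org/R");

            // HermiT encoding of "A subClassOf exists R.B":
            // head atom ">= 1 R.B"(X), body atom A(X).
            Atom head = Atom.create(AtLeastConcept.create(1, r, b), x);
            Atom body = Atom.create(a, x);
            DLClause clause = DLClause.create(new Atom[] {head}, new Atom[] {body});

            // convert() yields datalog rules roughly of the form
            //   B(c) :- A(X)   and   R(X, c) :- A(X)
            // where c is a fresh Skolem individual drawn from SkolemTermsManager.
            Collection<DLClause> datalog = new OverApproxExist().convert(clause, clause);
            for (DLClause rule : datalog)
                System.out.println(rule);
        }
    }

For cardinalities greater than one, convert() additionally emits inequality clauses between the introduced Skolem individuals, as can be seen in overApprox() above.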