author     RncLsn <rnc.lsn@gmail.com>    2015-05-12 18:48:56 +0100
committer  RncLsn <rnc.lsn@gmail.com>    2015-05-12 18:48:56 +0100
commit     0c2726db44b562cbda9bfa87e76d829927c31ec8 (patch)
tree       f4a681da5802ca90888719171a05a5d5cf78f040 /src/uk/ac/ox/cs
parent     4fe4ca32d8f45807ab881b6fb8e814842dad0ec6 (diff)
download   ACQuA-0c2726db44b562cbda9bfa87e76d829927c31ec8.tar.gz
           ACQuA-0c2726db44b562cbda9bfa87e76d829927c31ec8.zip
Added classes for implementing the new upper store (Limited Skolemisation).
Started implementation of the new classes.
Diffstat (limited to 'src/uk/ac/ox/cs')
-rw-r--r--  src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java     7
-rw-r--r--  src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApproximator.java   36
-rw-r--r--  src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java              10
-rw-r--r--  src/uk/ac/ox/cs/pagoda/multistage/MultiStageUpperProgram.java            387
-rw-r--r--  src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication.java             101
-rw-r--r--  src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java    73
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java                     129
-rw-r--r--  src/uk/ac/ox/cs/pagoda/rules/ExistConstantApproximator.java (renamed from src/uk/ac/ox/cs/pagoda/multistage/ExistConstantApproximator.java)     6
-rw-r--r--  src/uk/ac/ox/cs/pagoda/rules/LimitedSkolemisationApproximator.java        72
-rw-r--r--  src/uk/ac/ox/cs/pagoda/rules/OverApproxExist.java                        104
-rw-r--r--  src/uk/ac/ox/cs/pagoda/rules/TupleDependentApproximator.java (renamed from src/uk/ac/ox/cs/pagoda/multistage/ExistApproximator.java)     9
-rw-r--r--  src/uk/ac/ox/cs/pagoda/util/SparqlHelper.java                             49
12 files changed, 490 insertions, 493 deletions
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java b/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java
index b0068e7..13752a3 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java
@@ -2,15 +2,16 @@ package uk.ac.ox.cs.pagoda.multistage;
2 2
3 3
4import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; 4import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
5import uk.ac.ox.cs.pagoda.rules.ExistConstantApproximator;
6import uk.ac.ox.cs.pagoda.rules.LimitedSkolemisationApproximator;
5import uk.ac.ox.cs.pagoda.rules.Program; 7import uk.ac.ox.cs.pagoda.rules.Program;
6 8
7public class LimitedSkolemisationApplication extends RestrictedApplication { 9public class LimitedSkolemisationApplication extends RestrictedApplication {
8 10
11 public static final int MAX_DEPTH = 1;
9 12
10 public LimitedSkolemisationApplication(Program program, BottomStrategy upperBottom) { 13 public LimitedSkolemisationApplication(Program program, BottomStrategy upperBottom) {
11 super(program, upperBottom); 14 super(program, upperBottom);
15 m_approxExist = new LimitedSkolemisationApproximator(MAX_DEPTH, new ExistConstantApproximator());
12 } 16 }
13
14
15 // TODO override methods properly
16} 17}
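Note (illustration, not part of the commit): with MAX_DEPTH = 1 the application now wires in a LimitedSkolemisationApproximator whose fallback is the ExistConstantApproximator. Going by the Javadoc of the relocated class in the next hunk, a rule is Skolemised only while every term of its grounding substitution stays below that depth, and deeper groundings are handled by the fallback; a small sketch of this decision follows the next hunk.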
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApproximator.java b/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApproximator.java
deleted file mode 100644
index 348e849..0000000
--- a/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApproximator.java
+++ /dev/null
@@ -1,36 +0,0 @@
1package uk.ac.ox.cs.pagoda.multistage;
2
3import org.semanticweb.HermiT.model.DLClause;
4
5import java.util.Collection;
6
7/**
8 * Approximates existential rules by a limited form of Skolemisation.
9 * <p>
10 * Given a rule and a grounding substitution,
11 * it Skolemises the rule if
12 * all the terms in the substitution have depth less than a given depth,
13 * otherwise it approximates using an alternative <tt>ExistApproximator</tt>.
14 *
15 * */
16public class LimitedSkolemisationApproximator implements ExistApproximator {
17
18 private final int maxTermDepth;
19 private final ExistApproximator alternativeApproximator;
20
21 public LimitedSkolemisationApproximator(int maxTermDepth) {
22 this(maxTermDepth, new ExistConstantApproximator());
23 }
24
25 public LimitedSkolemisationApproximator(int maxTermDepth, ExistApproximator alternativeApproximator) {
26 this.maxTermDepth = maxTermDepth;
27 this.alternativeApproximator = alternativeApproximator;
28 }
29
30 @Override
31 public Collection<DLClause> convert(DLClause clause,
32 DLClause originalClause,
33 Collection<AnswerTupleID> violationTuples) {
34 return null;
35 }
36}
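Illustration (not from this commit): a minimal, self-contained sketch of the depth-bounded decision the Javadoc above describes. The Term class and its depth() method are simplified stand-ins for the HermiT/PAGOdA term classes; only the rule "Skolemise while every term of the grounding substitution has depth below maxTermDepth, otherwise use the alternative approximator" is taken from the comment.

    // Sketch only: decide between Skolemisation and the fallback approximation
    // based on the maximal depth of the terms in a grounding substitution.
    public class DepthBoundedChoiceSketch {

        // Toy term: a named individual (depth 0) or a Skolem function applied to a term.
        static class Term {
            final Term argument; // null for a named individual
            Term(Term argument) { this.argument = argument; }
            int depth() { return argument == null ? 0 : 1 + argument.depth(); }
        }

        static boolean shouldSkolemise(Iterable<Term> groundingTerms, int maxTermDepth) {
            for (Term t : groundingTerms)
                if (t.depth() >= maxTermDepth) return false; // too deep: fall back
            return true;                                     // all terms shallow: Skolemise
        }

        public static void main(String[] args) {
            Term a  = new Term(null);   // named individual a, depth 0
            Term fa = new Term(a);      // Skolem term f(a), depth 1
            System.out.println(shouldSkolemise(java.util.Arrays.asList(a), 1));  // true
            System.out.println(shouldSkolemise(java.util.Arrays.asList(fa), 1)); // false
        }
    }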
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java
index 7e40faf..f0f631a 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java
@@ -69,11 +69,11 @@ public class MultiStageQueryEngine extends StageQueryEngine {
69 // TODO to be removed ... 69 // TODO to be removed ...
70// if (gap == null) 70// if (gap == null)
71// program.save("output/multi.dlog"); 71// program.save("output/multi.dlog");
72 72
73 Collection<Violation> violations = null; 73 Collection<Violation> violations;
74 int iteration = 0; 74 int iteration = 0;
75 Timer subTimer = new Timer(); 75 Timer subTimer = new Timer();
76 boolean incrementally = false; 76 boolean incrementally;
77 try { 77 try {
78 while (true) { 78 while (true) {
79 long oldTripleCount = store.getTriplesCount(); 79 long oldTripleCount = store.getTriplesCount();
@@ -106,7 +106,7 @@ public class MultiStageQueryEngine extends StageQueryEngine {
106 106
107 if (!isValid()) { 107 if (!isValid()) {
108 if (iteration == 1) { 108 if (iteration == 1) {
109 Utility.logInfo("The ontology is incosistent."); 109 Utility.logInfo("The ontology is inconsistent.");
110 return -1; 110 return -1;
111 } 111 }
112 Utility.logInfo(name + " store FAILED for multi-stage materialisation in " + t.duration() + " seconds."); 112 Utility.logInfo(name + " store FAILED for multi-stage materialisation in " + t.duration() + " seconds.");
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageUpperProgram.java b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageUpperProgram.java
index daf7c2c..acaf167 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageUpperProgram.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageUpperProgram.java
@@ -7,8 +7,10 @@ import uk.ac.ox.cs.pagoda.MyPrefixes;
7import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; 7import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
8import uk.ac.ox.cs.pagoda.hermit.RuleHelper; 8import uk.ac.ox.cs.pagoda.hermit.RuleHelper;
9import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager; 9import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager;
10import uk.ac.ox.cs.pagoda.rules.ExistConstantApproximator;
10import uk.ac.ox.cs.pagoda.rules.OverApproxExist; 11import uk.ac.ox.cs.pagoda.rules.OverApproxExist;
11import uk.ac.ox.cs.pagoda.rules.Program; 12import uk.ac.ox.cs.pagoda.rules.Program;
13import uk.ac.ox.cs.pagoda.rules.TupleDependentApproximator;
12import uk.ac.ox.cs.pagoda.util.Namespace; 14import uk.ac.ox.cs.pagoda.util.Namespace;
13import uk.ac.ox.cs.pagoda.util.SparqlHelper; 15import uk.ac.ox.cs.pagoda.util.SparqlHelper;
14import uk.ac.ox.cs.pagoda.util.Timer; 16import uk.ac.ox.cs.pagoda.util.Timer;
@@ -18,212 +20,264 @@ import java.io.*;
18import java.util.*; 20import java.util.*;
19 21
20public abstract class MultiStageUpperProgram { 22public abstract class MultiStageUpperProgram {
21
22 Set<DLClause> constraints = new HashSet<DLClause>();
23 Set<DLClause> rules = new HashSet<DLClause>();
24 Collection<DLClause> clauses;
25 23
26 BottomStrategy m_bottom = null; 24 protected static final Variable X = Variable.create("X");
27 ExistApproximator m_approxExist = new ExistConstantApproximator(); 25 Set<DLClause> constraints = new HashSet<DLClause>();
28 26 Set<DLClause> rules = new HashSet<DLClause>();
29 protected static final Variable X = Variable.create("X"); 27 Collection<DLClause> clauses;
30 28 BottomStrategy m_bottom = null;
29 TupleDependentApproximator m_approxExist = new ExistConstantApproximator();
30 Map<DLClause, DLClause> map = new HashMap<DLClause, DLClause>();
31 Set<DLPredicate> updatedPredicates = new HashSet<DLPredicate>();
31 private MyPrefixes prefixes = MyPrefixes.PAGOdAPrefixes; 32 private MyPrefixes prefixes = MyPrefixes.PAGOdAPrefixes;
32 Map<DLClause, DLClause> map = new HashMap<DLClause, DLClause>(); 33 private StringBuilder datalogRuleText = new StringBuilder();
34 private Timer t = new Timer();
33 35
34 public MultiStageUpperProgram(Program program, BottomStrategy upperBottom) { 36 public MultiStageUpperProgram(Program program, BottomStrategy upperBottom) {
35 m_bottom = upperBottom; 37 m_bottom = upperBottom;
36 clauses = getInitialClauses(program); 38 clauses = getInitialClauses(program);
37 Collection<DLClause> introducedConstraints = new LinkedList<DLClause>(); 39 Collection<DLClause> introducedConstraints = new LinkedList<DLClause>();
38 LinkedList<Atom> newHeadAtoms = new LinkedList<Atom>(); 40 LinkedList<Atom> newHeadAtoms = new LinkedList<Atom>();
39 for (DLClause clause: m_bottom.process(clauses)) { 41 for (DLClause clause: m_bottom.process(clauses)) {
40 if (m_bottom.isBottomRule(clause) || clause.getHeadLength() == 1 && !(clause.getHeadAtom(0).getDLPredicate() instanceof AtLeast)) 42 if (m_bottom.isBottomRule(clause) || clause.getHeadLength() == 1 && !(clause.getHeadAtom(0).getDLPredicate() instanceof AtLeast))
41 addDatalogRule(clause); 43 addDatalogRule(clause);
42 else { 44 else {
43 newHeadAtoms.clear(); 45 newHeadAtoms.clear();
44 boolean changed = false; 46 boolean changed = false;
45 for (Atom atom: clause.getHeadAtoms()) { 47 for (Atom atom: clause.getHeadAtoms()) {
46 if (atom.getDLPredicate() instanceof AtLeastConcept) { 48 if (atom.getDLPredicate() instanceof AtLeastConcept) {
47 AtLeastConcept atLeast = (AtLeastConcept) atom.getDLPredicate(); 49 AtLeastConcept atLeast = (AtLeastConcept) atom.getDLPredicate();
48 if (atLeast.getToConcept() instanceof AtomicNegationConcept) { 50 if (atLeast.getToConcept() instanceof AtomicNegationConcept) {
49 AtomicConcept positive = ((AtomicNegationConcept) atLeast.getToConcept()).getNegatedAtomicConcept(); 51 AtomicConcept positive = ((AtomicNegationConcept) atLeast.getToConcept()).getNegatedAtomicConcept();
50 AtomicConcept negative = OverApproxExist.getNegationConcept(positive); 52 AtomicConcept negative = OverApproxExist.getNegationConcept(positive);
51 Atom atom1 = Atom.create(positive, X); 53 Atom atom1 = Atom.create(positive, X);
52 Atom atom2 = Atom.create(negative, X); 54 Atom atom2 = Atom.create(negative, X);
53 introducedConstraints.add(DLClause.create(new Atom[0], new Atom[] {atom1, atom2})); 55 introducedConstraints.add(DLClause.create(new Atom[0], new Atom[]{atom1, atom2}));
54 newHeadAtoms.add( 56 newHeadAtoms.add(
55 Atom.create( 57 Atom.create(
56 AtLeastConcept.create(atLeast.getArity(), atLeast.getOnRole(), negative), 58 AtLeastConcept.create(atLeast.getArity(), atLeast.getOnRole(), negative),
57 atom.getArgument(0))); 59 atom.getArgument(0)));
58 changed = true; 60 changed = true;
59 continue; 61 continue;
60 } 62 }
61 } 63 }
62 else if (atom.getDLPredicate() instanceof AtLeastDataRange) 64 else if (atom.getDLPredicate() instanceof AtLeastDataRange)
63 changed = true; 65 changed = true;
64 else 66 else
65 newHeadAtoms.add(atom); 67 newHeadAtoms.add(atom);
66 68
67 } 69 }
68 if (!changed) constraints.add(clause); 70 if (!changed) constraints.add(clause);
69 else if (!newHeadAtoms.isEmpty()) { 71 else if (!newHeadAtoms.isEmpty()) {
70 DLClause newClause = DLClause.create(newHeadAtoms.toArray(new Atom[0]), clause.getBodyAtoms()); 72 DLClause newClause = DLClause.create(newHeadAtoms.toArray(new Atom[0]), clause.getBodyAtoms());
71 map.put(newClause, getOriginalClause(clause)); 73 map.put(newClause, getOriginalClause(clause));
72 constraints.add(newClause); 74 constraints.add(newClause);
73 } 75 }
74 } 76 }
75 } 77 }
76 78
77 for (DLClause clause: m_bottom.process(introducedConstraints)) 79 for (DLClause clause: m_bottom.process(introducedConstraints))
78 addDatalogRule(clause); 80 addDatalogRule(clause);
79 } 81 }
80 82
81 protected void addDatalogRule(DLClause clause) {
82 rules.add(clause);
83 datalogRuleText.append(RuleHelper.getText(clause)).append(Utility.LINE_SEPARATOR);
84 }
85
86 public static Atom getNegativeAtom(Atom atom) { 83 public static Atom getNegativeAtom(Atom atom) {
87 if (atom.getDLPredicate() instanceof AtomicConcept) 84 if (atom.getDLPredicate() instanceof AtomicConcept)
88 return Atom.create(OverApproxExist.getNegationConcept(atom.getDLPredicate()), atom.getArgument(0)); 85 return Atom.create(OverApproxExist.getNegationConcept(atom.getDLPredicate()), atom.getArgument(0));
89 86
90 if (atom.getDLPredicate() instanceof Inequality) 87 if (atom.getDLPredicate() instanceof Inequality)
91 return Atom.create(Equality.INSTANCE, atom.getArgument(0), atom.getArgument(1)); 88 return Atom.create(Equality.INSTANCE, atom.getArgument(0), atom.getArgument(1));
92 89
93 if (atom.getDLPredicate() instanceof Equality || atom.getDLPredicate() instanceof AnnotatedEquality) 90 if (atom.getDLPredicate() instanceof Equality || atom.getDLPredicate() instanceof AnnotatedEquality)
94 return Atom.create(Inequality.INSTANCE, atom.getArgument(0), atom.getArgument(1)); 91 return Atom.create(Inequality.INSTANCE, atom.getArgument(0), atom.getArgument(1));
95 92
96 return null; 93 return null;
97 } 94 }
98 95
99 private StringBuilder datalogRuleText = new StringBuilder(); 96 public static AnswerTupleID project(AnswerTupleID tuple, String[] vars, String[] subVars) {
97 int subArity = 0;
98 for (int i = 0; i < subVars.length; ++i)
99 if (subVars[i] != null) ++subArity;
100
101 if (tuple.getArity() == subArity)
102 return tuple;
103
104 AnswerTupleID newTuple = new AnswerTupleID(subArity);
105 for (int i = 0, j = 0; i < vars.length; ++i)
106 if (subVars[i] != null && !subVars[i].isEmpty()) {
107 newTuple.setTerm(j++, tuple.getTerm(i));
108 }
109
110 return newTuple;
111 }
112
113 public static String[] getVarSubset(String[] vars, Atom... atoms) {
114 String[] newVars = new String[vars.length];
115 Set<Variable> allVars = new HashSet<Variable>();
116 int arity;
117 for (Atom atom : atoms) {
118 arity = atom.getArity();
119 if (atom.getDLPredicate() instanceof AnnotatedEquality) arity = 2;
120 for (int j = 0; j < arity; ++j)
121 if (atom.getArgument(j) instanceof Variable) {
122 allVars.add(atom.getArgumentVariable(j));
123 }
124 }
125
126 for (int i = 0; i < vars.length; ++i) {
127 newVars[i] = allVars.contains(Variable.create(vars[i])) ? vars[i] : null;
128 }
129
130 return newVars;
131 }
132
133 public static String[] getCommonVars(DLClause clause) {
134 Set<Variable> headVars = getVariables(clause.getHeadAtoms());
135 Set<Variable> bodyVars = getVariables(clause.getBodyAtoms());
136
137 Collection<String> common = new LinkedList<String>();
138 for (Variable v : headVars)
139 if (bodyVars.contains(v)) common.add(v.getName());
140
141 return common.toArray(new String[0]);
142 }
143
144 public static Set<Variable> getVariables(Atom[] atoms) {
145 Set<Variable> v = new HashSet<Variable>();
146 for (Atom atom : atoms) atom.getVariables(v);
147 return v;
148 }
149
150 protected void addDatalogRule(DLClause clause) {
151 rules.add(clause);
152 datalogRuleText.append(RuleHelper.getText(clause)).append(Utility.LINE_SEPARATOR);
153 }
100 154
101 public String getDatalogRuleText() { 155 public String getDatalogRuleText() {
102 StringBuilder program = new StringBuilder(); 156 StringBuilder program = new StringBuilder();
103 program.append(prefixes.prefixesText()); 157 program.append(prefixes.prefixesText());
104 program.append(datalogRuleText.toString()); 158 program.append(datalogRuleText.toString());
105 return program.toString(); 159 return program.toString();
106 } 160 }
107 161
108 protected void addDerivedPredicate(MultiStageQueryEngine engine) { 162 protected void addDerivedPredicate(MultiStageQueryEngine engine) {
109 TupleIterator derivedTuples = null; 163 TupleIterator derivedTuples = null;
110 try { 164 try {
111 derivedTuples = engine.internal_evaluateAgainstIDBs("select distinct ?z where { ?x <" + Namespace.RDF_TYPE + "> ?z . }"); 165 derivedTuples = engine.internal_evaluateAgainstIDBs("select distinct ?z where { ?x <" + Namespace.RDF_TYPE + "> ?z . }");
112 for (long multi = derivedTuples.open(); multi != 0; multi = derivedTuples.getNext()) { 166 for (long multi = derivedTuples.open(); multi != 0; multi = derivedTuples.getNext()) {
113 String p = prefixes.expandIRI(RDFoxTripleManager.getQuotedTerm(derivedTuples.getResource(0))); 167 String p = prefixes.expandIRI(RDFoxTripleManager.getQuotedTerm(derivedTuples.getResource(0)));
114 updatedPredicates.add(AtomicConcept.create(p)); 168 updatedPredicates.add(AtomicConcept.create(p));
115 } 169 }
116 } catch (JRDFStoreException e) { 170 } catch (JRDFStoreException e) {
117 e.printStackTrace(); 171 e.printStackTrace();
118 } finally { 172 } finally {
119 if (derivedTuples != null) derivedTuples.dispose(); 173 if (derivedTuples != null) derivedTuples.dispose();
120 } 174 }
121 175
122 derivedTuples = null; 176 derivedTuples = null;
123 try { 177 try {
124 derivedTuples = engine.internal_evaluateAgainstIDBs("select distinct ?y where { ?x ?y ?z . }"); 178 derivedTuples = engine.internal_evaluateAgainstIDBs("select distinct ?y where { ?x ?y ?z . }");
125 for (long multi = derivedTuples.open(); multi != 0; multi = derivedTuples.getNext()) { 179 for (long multi = derivedTuples.open(); multi != 0; multi = derivedTuples.getNext()) {
126 String p = RDFoxTripleManager.getQuotedTerm(derivedTuples.getResource(0)); 180 String p = RDFoxTripleManager.getQuotedTerm(derivedTuples.getResource(0));
127 if (p.equals(Namespace.RDF_TYPE_ABBR) || p.equals(Namespace.RDF_TYPE_QUOTED)) ; 181 if (p.equals(Namespace.RDF_TYPE_ABBR) || p.equals(Namespace.RDF_TYPE_QUOTED)) ;
128 else if (p.equals(Namespace.EQUALITY_ABBR) || p.equals(Namespace.EQUALITY_QUOTED)); 182 else if (p.equals(Namespace.EQUALITY_ABBR) || p.equals(Namespace.EQUALITY_QUOTED));
129 else updatedPredicates.add(AtomicRole.create(prefixes.expandIRI(p))); 183 else updatedPredicates.add(AtomicRole.create(prefixes.expandIRI(p)));
130 } 184 }
131 } catch (JRDFStoreException e) { 185 } catch (JRDFStoreException e) {
132 e.printStackTrace(); 186 e.printStackTrace();
133 } finally { 187 } finally {
134 if (derivedTuples != null) derivedTuples.dispose(); 188 if (derivedTuples != null) derivedTuples.dispose();
135 } 189 }
136 190
137 } 191 }
138 192
139 public abstract Collection<Violation> isIntegrated(MultiStageQueryEngine engine, boolean incrementally); 193 public abstract Collection<Violation> isIntegrated(MultiStageQueryEngine engine, boolean incrementally);
140 194
141 protected Violation violate(MultiStageQueryEngine engine, DLClause clause, boolean incrementally) { 195 protected Violation violate(MultiStageQueryEngine engine, DLClause clause, boolean incrementally) {
142 Utility.logTrace("checking constraint: " + clause); 196 Utility.logTrace("checking constraint: " + clause);
143 197
144 String[] vars = getCommonVars(clause), subVars; 198 String[] vars = getCommonVars(clause), subVars;
145 199
146 Set<AnswerTupleID> headAnswers = new HashSet<AnswerTupleID>(); 200 Set<AnswerTupleID> headAnswers = new HashSet<AnswerTupleID>();
147 Set<Integer> rootAnswers = new HashSet<Integer>(); 201 Set<Integer> rootAnswers = new HashSet<Integer>();
148 202
149 Set<Atom> restHeadAtoms = new HashSet<Atom>(); 203 Set<Atom> restHeadAtoms = new HashSet<Atom>();
150 Atom rootAtom = null; 204 Atom rootAtom = null;
151 for (Atom atom: clause.getHeadAtoms()) 205 for (Atom atom: clause.getHeadAtoms())
152 if (rootAtom == null && atom.getDLPredicate() instanceof AtomicConcept && atom.getArgument(0).equals(X)) 206 if (rootAtom == null && atom.getDLPredicate() instanceof AtomicConcept && atom.getArgument(0).equals(X))
153 rootAtom = atom; 207 rootAtom = atom;
154 else 208 else
155 restHeadAtoms.add(atom); 209 restHeadAtoms.add(atom);
156 if (rootAtom != null) { 210 if (rootAtom != null) {
157 subVars = getVarSubset(vars, rootAtom); 211 subVars = getVarSubset(vars, rootAtom);
158 getHeadAnswers(engine, rootAtom, subVars, headAnswers); 212 getHeadAnswers(engine, rootAtom, subVars, headAnswers);
159 for (AnswerTupleID tuple: headAnswers) rootAnswers.add(tuple.getTerm(0)); 213 for (AnswerTupleID tuple : headAnswers) rootAnswers.add(tuple.getTerm(0));
160 headAnswers.clear(); 214 headAnswers.clear();
161 } 215 }
162 216
163 if (incrementally) { 217 if (incrementally) {
164 boolean affected = false; 218 boolean affected = false;
165 for (Atom bodyAtom: clause.getBodyAtoms()) 219 for (Atom bodyAtom: clause.getBodyAtoms())
166 if (updatedPredicates.contains(bodyAtom.getDLPredicate())) { 220 if (updatedPredicates.contains(bodyAtom.getDLPredicate())) {
167 affected = true; 221 affected = true;
168 break; 222 break;
169 } 223 }
170 224
171 for (Atom headAtom: clause.getHeadAtoms()) 225 for (Atom headAtom: clause.getHeadAtoms())
172 if (headAtom.getDLPredicate() instanceof AtLeastConcept && 226 if (headAtom.getDLPredicate() instanceof AtLeastConcept &&
173 ((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()).getIRI().endsWith("_neg")) 227 ((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()).getIRI().endsWith("_neg"))
174 if (updatedPredicates.contains(OverApproxExist.getNegationConcept(((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept())))) { 228 if (updatedPredicates.contains(OverApproxExist.getNegationConcept(((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept())))) {
175 affected = true; 229 affected = true;
176 break; 230 break;
177 } 231 }
178 232
179 if (!affected) return null; 233 if (!affected) return null;
180 } 234 }
181 235
182 LinkedList<AnswerTupleID> bodyAnswers = getBodyAnswers(engine, clause, vars, rootAnswers); 236 LinkedList<AnswerTupleID> bodyAnswers = getBodyAnswers(engine, clause, vars, rootAnswers);
183 rootAnswers.clear(); 237 rootAnswers.clear();
184 238
185 Utility.logTrace("Time to compute body answers: " + t.duration() + " number: " + bodyAnswers.size()); 239 Utility.logTrace("Time to compute body answers: " + t.duration() + " number: " + bodyAnswers.size());
186 240
187 if (bodyAnswers.isEmpty()) return null; 241 if (bodyAnswers.isEmpty()) return null;
188 242
189 t.reset(); 243 t.reset();
190 244
191 for (Atom headAtom: restHeadAtoms) { 245 for (Atom headAtom: restHeadAtoms) {
192// Timer subTimer = new Timer(); 246// Timer subTimer = new Timer();
193 subVars = getVarSubset(vars, headAtom); 247 subVars = getVarSubset(vars, headAtom);
194 248
195 // TODO: conditions check negative existential restrictions 249 // TODO: conditions check negative existential restrictions
196// if (false) { 250// if (false) {
197 if (headAtom.getDLPredicate() instanceof AtLeastConcept && 251 if (headAtom.getDLPredicate() instanceof AtLeastConcept &&
198 ((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()).getIRI().endsWith("_neg")) { 252 ((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()).getIRI().endsWith("_neg")) {
199 AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate(); 253 AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate();
200 String x = null, y = "Y"; 254 String x = null, y = "Y";
201 for (String var: subVars) 255 for (String var: subVars)
202 if (var != null) x = var; 256 if (var != null) x = var;
203 if (x == "Y") y = "X"; 257 if (x == "Y") y = "X";
204 Atom[] atoms = new Atom[2]; 258 Atom[] atoms = new Atom[2];
205 if (alc.getOnRole() instanceof AtomicRole) 259 if (alc.getOnRole() instanceof AtomicRole)
206 atoms[0] = Atom.create((AtomicRole) alc.getOnRole(), Variable.create(x), Variable.create(y)); 260 atoms[0] = Atom.create((AtomicRole) alc.getOnRole(), Variable.create(x), Variable.create(y));
207 else 261 else
208 atoms[0] = Atom.create(((InverseRole) alc.getOnRole()).getInverseOf(), Variable.create(y), Variable.create(x)); 262 atoms[0] = Atom.create(((InverseRole) alc.getOnRole()).getInverseOf(), Variable.create(y), Variable.create(x));
209 263
210 atoms[1] = Atom.create(OverApproxExist.getNegationConcept((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()), Variable.create(y)); 264 atoms[1] = Atom.create(OverApproxExist.getNegationConcept((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()), Variable.create(y));
211 Set<AnswerTupleID> addAnswers = new HashSet<AnswerTupleID>(); 265 Set<AnswerTupleID> addAnswers = new HashSet<AnswerTupleID>();
212 TupleIterator tuples = null; 266 TupleIterator tuples = null;
213 try { 267 try {
214 tuples = engine.internal_evaluateNotExpanded(SparqlHelper.getSPARQLQuery(new Atom[] {atoms[0]}, x, y)); 268 tuples = engine.internal_evaluateNotExpanded(SparqlHelper.getSPARQLQuery(new Atom[]{atoms[0]}, x, y));
215 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext()) 269 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext())
216 addAnswers.add(new AnswerTupleID(tuples)); 270 addAnswers.add(new AnswerTupleID(tuples));
217 } catch (JRDFStoreException e) { 271 } catch (JRDFStoreException e) {
218 e.printStackTrace(); 272 e.printStackTrace();
219 } finally { 273 } finally {
220 if (tuples != null) tuples.dispose(); 274 if (tuples != null) tuples.dispose();
221 } 275 }
222 276
223 tuples = null; 277 tuples = null;
224 try { 278 try {
225 tuples = engine.internal_evaluateNotExpanded(SparqlHelper.getSPARQLQuery(atoms, x, y)); 279 tuples = engine.internal_evaluateNotExpanded(SparqlHelper.getSPARQLQuery(atoms, x, y));
226 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext()) 280 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext())
227 addAnswers.remove(new AnswerTupleID(tuples)); 281 addAnswers.remove(new AnswerTupleID(tuples));
228 } catch (JRDFStoreException e) { 282 } catch (JRDFStoreException e) {
229 e.printStackTrace(); 283 e.printStackTrace();
@@ -235,140 +289,87 @@ public abstract class MultiStageUpperProgram {
235 headAnswers.add(project(tuple, new String[] {x, y}, new String[] {x, null})); 289 headAnswers.add(project(tuple, new String[] {x, y}, new String[] {x, null}));
236 addAnswers.clear(); 290 addAnswers.clear();
237 } 291 }
238 292
239 getHeadAnswers(engine, headAtom, subVars, headAnswers); 293 getHeadAnswers(engine, headAtom, subVars, headAnswers);
240 for (Iterator<AnswerTupleID> iter = bodyAnswers.iterator(); iter.hasNext(); ) 294 for (Iterator<AnswerTupleID> iter = bodyAnswers.iterator(); iter.hasNext(); )
241 if (headAnswers.contains(project(iter.next(), vars, subVars))) 295 if (headAnswers.contains(project(iter.next(), vars, subVars)))
242 iter.remove(); 296 iter.remove();
243 headAnswers.clear(); 297 headAnswers.clear();
244 } 298 }
245 299
246 Utility.logTrace("Time to rule out all head answers: " + t.duration() + " rest number: " + bodyAnswers.size()); 300 Utility.logTrace("Time to rule out all head answers: " + t.duration() + " rest number: " + bodyAnswers.size());
247 301
248 if (bodyAnswers.isEmpty()) return null; 302 if (bodyAnswers.isEmpty()) return null;
249 303
250 return new Violation(clause, bodyAnswers, vars); 304 return new Violation(clause, bodyAnswers, vars);
251 } 305 }
252 306
253 private void getHeadAnswers(MultiStageQueryEngine engine, Atom headAtom, String[] commonVars, Set<AnswerTupleID> headAnswers) { 307 private void getHeadAnswers(MultiStageQueryEngine engine, Atom headAtom, String[] commonVars, Set<AnswerTupleID> headAnswers) {
254 String headQuery = SparqlHelper.getSPARQLQuery(new Atom[] { headAtom }, commonVars); 308 String headQuery = SparqlHelper.getSPARQLQuery(new Atom[]{headAtom}, commonVars);
255 TupleIterator tuples = null; 309 TupleIterator tuples = null;
256 try { 310 try {
257 tuples = engine.internal_evaluateNotExpanded(headQuery); 311 tuples = engine.internal_evaluateNotExpanded(headQuery);
258 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext()) 312 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext())
259 headAnswers.add(new AnswerTupleID(tuples)); 313 headAnswers.add(new AnswerTupleID(tuples));
260 } catch (JRDFStoreException e) { 314 } catch (JRDFStoreException e) {
261 e.printStackTrace(); 315 e.printStackTrace();
262 } finally { 316 } finally {
263 if (tuples != null) tuples.dispose(); 317 if (tuples != null) tuples.dispose();
264 } 318 }
265 } 319 }
266 320
267 private Timer t = new Timer();
268
269 protected LinkedList<AnswerTupleID> getBodyAnswers(MultiStageQueryEngine engine, DLClause clause, String[] vars, Set<Integer> rootAnswers) { //, boolean incrementally) { 321 protected LinkedList<AnswerTupleID> getBodyAnswers(MultiStageQueryEngine engine, DLClause clause, String[] vars, Set<Integer> rootAnswers) { //, boolean incrementally) {
270 Collection<int[]> validIndexes = new LinkedList<int[]>(); 322 Collection<int[]> validIndexes = new LinkedList<int[]>();
271 323
272 int rootVarIndex = -1; 324 int rootVarIndex = -1;
273 for (int i = 0; i < vars.length; ++i) 325 for (int i = 0; i < vars.length; ++i)
274 if (vars[i].equals("X")) { rootVarIndex = i; break; } 326 if (vars[i].equals("X")) {
275 327 rootVarIndex = i;
276 String[] subVars; 328 break;
329 }
330
331 String[] subVars;
277 for (Atom headAtom: clause.getHeadAtoms()) { 332 for (Atom headAtom: clause.getHeadAtoms()) {
278 if ((headAtom.getDLPredicate() instanceof Equality || headAtom.getDLPredicate() instanceof AnnotatedEquality) 333 if ((headAtom.getDLPredicate() instanceof Equality || headAtom.getDLPredicate() instanceof AnnotatedEquality)
279 && headAtom.getArgument(0) instanceof Variable && headAtom.getArgument(1) instanceof Variable) { 334 && headAtom.getArgument(0) instanceof Variable && headAtom.getArgument(1) instanceof Variable) {
280 int[] validIndex = new int[2]; 335 int[] validIndex = new int[2];
281 subVars = getVarSubset(vars, headAtom); 336 subVars = getVarSubset(vars, headAtom);
282 for (int i = 0, j = 0; i < subVars.length; ++i) 337 for (int i = 0, j = 0; i < subVars.length; ++i)
283 if (subVars[i] != null) 338 if (subVars[i] != null)
284 validIndex[j++] = i; 339 validIndex[j++] = i;
285 validIndexes.add(validIndex); 340 validIndexes.add(validIndex);
286 } 341 }
287 } 342 }
288 343
289 t.reset(); 344 t.reset();
290 345
291 LinkedList<AnswerTupleID> bodyAnswers = new LinkedList<AnswerTupleID>(); 346 LinkedList<AnswerTupleID> bodyAnswers = new LinkedList<AnswerTupleID>();
292 String bodyQuery = SparqlHelper.getSPARQLQuery(clause.getBodyAtoms(), vars); 347 String bodyQuery = SparqlHelper.getSPARQLQuery(clause.getBodyAtoms(), vars);
293 TupleIterator bodyTuples = null; 348 TupleIterator bodyTuples = null;
294 349
295 boolean filtered; 350 boolean filtered;
296 try { 351 try {
297 bodyTuples = engine.internal_evaluateNotExpanded(bodyQuery); 352 bodyTuples = engine.internal_evaluateNotExpanded(bodyQuery);
298 for (long multi = bodyTuples.open(); multi != 0; multi = bodyTuples.getNext()) { 353 for (long multi = bodyTuples.open(); multi != 0; multi = bodyTuples.getNext()) {
299 filtered = false; 354 filtered = false;
300 if (rootVarIndex >= 0 && rootAnswers.contains(bodyTuples.getResourceID(rootVarIndex))) continue; 355 if (rootVarIndex >= 0 && rootAnswers.contains(bodyTuples.getResourceID(rootVarIndex))) continue;
301 for (int[] validIndex: validIndexes) 356 for (int[] validIndex: validIndexes)
302 if (bodyTuples.getResourceID(validIndex[0]) == bodyTuples.getResourceID(validIndex[1])) { 357 if (bodyTuples.getResourceID(validIndex[0]) == bodyTuples.getResourceID(validIndex[1])) {
303 filtered = true; 358 filtered = true;
304 break; 359 break;
305 } 360 }
306 if (!filtered) 361 if (!filtered)
307 bodyAnswers.add(new AnswerTupleID(bodyTuples)); 362 bodyAnswers.add(new AnswerTupleID(bodyTuples));
308 } 363 }
309 } catch (JRDFStoreException e) { 364 } catch (JRDFStoreException e) {
310 e.printStackTrace(); 365 e.printStackTrace();
311 } finally { 366 } finally {
312 if (bodyTuples != null) bodyTuples.dispose(); 367 if (bodyTuples != null) bodyTuples.dispose();
313 } 368 }
314
315 validIndexes.clear();
316 Utility.logTrace("Time to get all body answers: " + t.duration());
317 return bodyAnswers;
318 }
319
320 public static AnswerTupleID project(AnswerTupleID tuple, String[] vars, String[] subVars) {
321 int subArity = 0;
322 for (int i = 0; i < subVars.length; ++i)
323 if (subVars[i] != null) ++subArity;
324
325 if (tuple.getArity() == subArity)
326 return tuple;
327
328 AnswerTupleID newTuple = new AnswerTupleID(subArity);
329 for (int i = 0, j = 0; i < vars.length; ++i)
330 if (subVars[i] != null && !subVars[i].isEmpty()) {
331 newTuple.setTerm(j++, tuple.getTerm(i));
332 }
333
334 return newTuple;
335 }
336
337 public static String[] getVarSubset(String[] vars, Atom... atoms) {
338 String[] newVars = new String[vars.length];
339 Set<Variable> allVars = new HashSet<Variable>();
340 int arity;
341 for (Atom atom: atoms) {
342 arity = atom.getArity();
343 if (atom.getDLPredicate() instanceof AnnotatedEquality) arity = 2;
344 for (int j = 0; j < arity; ++j)
345 if (atom.getArgument(j) instanceof Variable) {
346 allVars.add(atom.getArgumentVariable(j));
347 }
348 }
349
350 for (int i = 0; i < vars.length; ++i) {
351 newVars[i] = allVars.contains(Variable.create(vars[i])) ? vars[i] : null;
352 }
353
354 return newVars;
355 }
356
357 public static String[] getCommonVars(DLClause clause) {
358 Set<Variable> headVars = getVariables(clause.getHeadAtoms());
359 Set<Variable> bodyVars = getVariables(clause.getBodyAtoms());
360 369
361 Collection<String> common = new LinkedList<String>(); 370 validIndexes.clear();
362 for (Variable v: headVars) 371 Utility.logTrace("Time to get all body answers: " + t.duration());
363 if (bodyVars.contains(v)) common.add(v.getName()); 372 return bodyAnswers;
364
365 return common.toArray(new String[0]);
366 }
367
368 public static Set<Variable> getVariables(Atom[] atoms) {
369 Set<Variable> v = new HashSet<Variable>();
370 for (Atom atom: atoms) atom.getVariables(v);
371 return v;
372 } 373 }
373 374
374 public Collection<DLClause> convertExist(DLClause clause, DLClause originalDLClause) { 375 public Collection<DLClause> convertExist(DLClause clause, DLClause originalDLClause) {
@@ -376,11 +377,9 @@ public abstract class MultiStageUpperProgram {
376 } 377 }
377 378
378 public Collection<DLClause> convertExist(DLClause clause, DLClause originalDLClause, List<AnswerTupleID> violationTuples) { 379 public Collection<DLClause> convertExist(DLClause clause, DLClause originalDLClause, List<AnswerTupleID> violationTuples) {
379 return m_bottom.process(m_approxExist.convert(clause, originalDLClause, null)); 380 return m_bottom.process(m_approxExist.convert(clause, originalDLClause, violationTuples));
380 } 381 }
381 382
382
383
384 public void save(String filename) { 383 public void save(String filename) {
385 try { 384 try {
386 BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filename))); 385 BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filename)));
@@ -395,11 +394,9 @@ public abstract class MultiStageUpperProgram {
395 e.printStackTrace(); 394 e.printStackTrace();
396 } catch (IOException e) { 395 } catch (IOException e) {
397 e.printStackTrace(); 396 e.printStackTrace();
398 } 397 }
399 } 398 }
400 399
401 Set<DLPredicate> updatedPredicates = new HashSet<DLPredicate>();
402
403// public void addUpdatedPredicates(Set<DLPredicate> predicates) { 400// public void addUpdatedPredicates(Set<DLPredicate> predicates) {
404// for (Iterator<DLPredicate> iter = predicates.iterator(); iter.hasNext(); ) { 401// for (Iterator<DLPredicate> iter = predicates.iterator(); iter.hasNext(); ) {
405// updatedPredicate.add(iter.next()); 402// updatedPredicate.add(iter.next());
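Illustration (not from this commit): the project/getVarSubset helpers moved up in the hunk above keep only the answer-tuple positions that a given head atom actually mentions, using a subVars mask in which unused positions are null. The sketch below reproduces that masking on plain int arrays of resource IDs; the types and names are simplified stand-ins for AnswerTupleID and the real helper signatures.

    import java.util.Arrays;

    public class ProjectionSketch {

        // Keep the positions of tuple whose variable is still present in subVars
        // (null marks a position the target atom does not use).
        static int[] project(int[] tuple, String[] vars, String[] subVars) {
            int subArity = 0;
            for (String v : subVars) if (v != null) ++subArity;
            if (tuple.length == subArity) return tuple;
            int[] projected = new int[subArity];
            for (int i = 0, j = 0; i < vars.length; ++i)
                if (subVars[i] != null) projected[j++] = tuple[i];
            return projected;
        }

        public static void main(String[] args) {
            String[] vars       = {"X", "Y", "Z"};
            String[] subVars    = {"X", null, "Z"};   // head atom mentions only X and Z
            int[]    bodyAnswer = {11, 22, 33};       // resource IDs bound to X, Y, Z
            System.out.println(Arrays.toString(project(bodyAnswer, vars, subVars))); // [11, 33]
        }
    }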
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication.java b/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication.java
index 174175f..b16e645 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication.java
@@ -22,59 +22,48 @@ public class RestrictedApplication extends MultiStageUpperProgram {
22 clauses.addAll(constraints); 22 clauses.addAll(constraints);
23 } 23 }
24 24
25 private void addNegativeDatalogRules() { 25 // It should be shifting
26 Collection<DLClause> allRules = new LinkedList<DLClause>(rules);
27 allRules.addAll(constraints);
28 for (DLClause clause: allRules) {
29 for (DLClause newClause: addAdditionalDatalogRules(clause, m_bottom, norm))
30 addDatalogRule(newClause);
31 }
32 allRules.clear();
33 }
34
35 // It should be c-Skolemisation + shifting
36 public static Collection<DLClause> addAdditionalDatalogRules(DLClause clause, BottomStrategy bottom, Normalisation norm) { 26 public static Collection<DLClause> addAdditionalDatalogRules(DLClause clause, BottomStrategy bottom, Normalisation norm) {
37 LinkedList<DLClause> newClauses = new LinkedList<DLClause>(); 27 LinkedList<DLClause> newClauses = new LinkedList<DLClause>();
38 Atom[] headAtoms = clause.getHeadAtoms(); 28 Atom[] headAtoms = clause.getHeadAtoms();
39 Atom[] bodyAtoms = clause.getBodyAtoms(); 29 Atom[] bodyAtoms = clause.getBodyAtoms();
40 int headLength = headAtoms.length; 30 int headLength = headAtoms.length;
41 int bodyLength = bodyAtoms.length; 31 int bodyLength = bodyAtoms.length;
42 DLClause tClause; 32 DLClause tClause;
43 if (bottom.isBottomRule(clause)) { 33 if (bottom.isBottomRule(clause)) {
44 if (clause.getBodyLength() == 1) return newClauses; 34 if (clause.getBodyLength() == 1) return newClauses;
45 for (int i = 0; i < bodyLength; ++i) { 35 for (int i = 0; i < bodyLength; ++i) {
46 if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) { 36 if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) {
47 AtomicConcept ac = (AtomicConcept) bodyAtoms[i].getDLPredicate(); 37 AtomicConcept ac = (AtomicConcept) bodyAtoms[i].getDLPredicate();
48 if (!ac.getIRI().endsWith("_exist")) { 38 if (!ac.getIRI().endsWith("_exist")) {
49 Atom[] newBodyAtoms = new Atom[bodyLength - 1]; 39 Atom[] newBodyAtoms = new Atom[bodyLength - 1];
50 for (int j = 0; j < bodyLength - 1; ++j) 40 for (int j = 0; j < bodyLength - 1; ++j)
51 newBodyAtoms[j] = j < i ? bodyAtoms[j] : bodyAtoms[j + 1]; 41 newBodyAtoms[j] = j < i ? bodyAtoms[j] : bodyAtoms[j + 1];
52 42
53 Atom negativeAtom = getNegativeAtom(bodyAtoms[i]); 43 Atom negativeAtom = getNegativeAtom(bodyAtoms[i]);
54 tClause = DLClause.create(new Atom[] { negativeAtom }, newBodyAtoms); 44 tClause = DLClause.create(new Atom[] { negativeAtom }, newBodyAtoms);
55 // addDatalogRule(tClause); 45 // addDatalogRule(tClause);
56 newClauses.add(tClause); 46 newClauses.add(tClause);
57 } 47 } else {
58 else {
59 Atom[] newBodyAtoms = new Atom[bodyLength]; 48 Atom[] newBodyAtoms = new Atom[bodyLength];
60 Atom negativeAtom = null; 49 Atom negativeAtom = null;
61 org.semanticweb.HermiT.model.Variable E = org.semanticweb.HermiT.model.Variable.create("E"); 50 org.semanticweb.HermiT.model.Variable E = org.semanticweb.HermiT.model.Variable.create("E");
62 AtLeastConcept alc = norm.getLeftAtLeastConcept(ac); 51 AtLeastConcept alc = norm.getLeftAtLeastConcept(ac);
63 52
64 if (alc.getOnRole() instanceof AtomicRole) 53 if (alc.getOnRole() instanceof AtomicRole)
65 newBodyAtoms[i] = Atom.create((AtomicRole) alc.getOnRole(), bodyAtoms[i].getArgument(0), E); 54 newBodyAtoms[i] = Atom.create((AtomicRole) alc.getOnRole(), bodyAtoms[i].getArgument(0), E);
66 else 55 else
67 newBodyAtoms[i] = Atom.create(((InverseRole) alc.getOnRole()).getInverseOf(), E, bodyAtoms[i].getArgument(0)); 56 newBodyAtoms[i] = Atom.create(((InverseRole) alc.getOnRole()).getInverseOf(), E, bodyAtoms[i].getArgument(0));
68 if (alc.getToConcept() instanceof AtomicConcept) 57 if (alc.getToConcept() instanceof AtomicConcept)
69 negativeAtom = getNegativeAtom(Atom.create((AtomicConcept) alc.getToConcept(), E)); 58 negativeAtom = getNegativeAtom(Atom.create((AtomicConcept) alc.getToConcept(), E));
70 else 59 else
71 negativeAtom = getNegativeAtom(Atom.create(((AtomicNegationConcept) alc.getToConcept()).getNegatedAtomicConcept(), E)); 60 negativeAtom = getNegativeAtom(Atom.create(((AtomicNegationConcept) alc.getToConcept()).getNegatedAtomicConcept(), E));
72 61
73 for (int j = 0; j < bodyLength; ++j) 62 for (int j = 0; j < bodyLength; ++j)
74 if (i != j) 63 if (i != j)
75 newBodyAtoms[j] = bodyAtoms[j]; 64 newBodyAtoms[j] = bodyAtoms[j];
65
76 66
77
78 tClause = DLClause.create(new Atom[] { negativeAtom }, newBodyAtoms); 67 tClause = DLClause.create(new Atom[] { negativeAtom }, newBodyAtoms);
79 // addDatalogRule(tClause); 68 // addDatalogRule(tClause);
80 newClauses.add(tClause); 69 newClauses.add(tClause);
@@ -84,55 +73,65 @@ public class RestrictedApplication extends MultiStageUpperProgram {
84 } 73 }
85 else if (headLength > 1) { 74 else if (headLength > 1) {
86 for (int i = 0; i < headLength; ++i) { 75 for (int i = 0; i < headLength; ++i) {
87 DLPredicate p = headAtoms[i].getDLPredicate(); 76 DLPredicate p = headAtoms[i].getDLPredicate();
88 if (!(p instanceof AtomicConcept)) { 77 if (!(p instanceof AtomicConcept)) {
89 return newClauses; 78 return newClauses;
90 } 79 }
91 } 80 }
92 81
93 for (int i = 0; i < headLength; ++i) { 82 for (int i = 0; i < headLength; ++i) {
94 Atom[] newBodyAtoms = new Atom[headLength + bodyLength - 1]; 83 Atom[] newBodyAtoms = new Atom[headLength + bodyLength - 1];
95 for (int j = 0; j < headLength + bodyLength - 1; ++j) 84 for (int j = 0; j < headLength + bodyLength - 1; ++j)
96 newBodyAtoms[j] = j < bodyLength ? bodyAtoms[j] : 85 newBodyAtoms[j] = j < bodyLength ? bodyAtoms[j] :
97 j < bodyLength + i ? getNegativeAtom(headAtoms[j - bodyLength]) : 86 j < bodyLength + i ? getNegativeAtom(headAtoms[j - bodyLength]) :
98 getNegativeAtom(headAtoms[j - bodyLength + 1]); 87 getNegativeAtom(headAtoms[j - bodyLength + 1]);
99 88
100 tClause = DLClause.create(new Atom[] { headAtoms[i] }, newBodyAtoms); 89 tClause = DLClause.create(new Atom[]{headAtoms[i]}, newBodyAtoms);
101// addDatalogRule(tClause); 90// addDatalogRule(tClause);
102 newClauses.add(tClause); 91 newClauses.add(tClause);
103 } 92 }
104 } 93 }
105 else if (headLength == 1) { 94 else if (headLength == 1) {
106 DLPredicate p = clause.getHeadAtom(0).getDLPredicate(); 95 DLPredicate p = clause.getHeadAtom(0).getDLPredicate();
107 if (p instanceof AtomicConcept) { 96 if (p instanceof AtomicConcept) {
108 Atom negativeHeadAtom = getNegativeAtom(clause.getHeadAtom(0)); 97 Atom negativeHeadAtom = getNegativeAtom(clause.getHeadAtom(0));
109 for (int i = 0; i < bodyLength; ++i) 98 for (int i = 0; i < bodyLength; ++i)
110 if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) { 99 if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) {
111 Atom[] newBodyAtoms = new Atom[clause.getBodyLength()]; 100 Atom[] newBodyAtoms = new Atom[clause.getBodyLength()];
112 newBodyAtoms[0] = negativeHeadAtom; 101 newBodyAtoms[0] = negativeHeadAtom;
113 for (int j = 1; j < bodyLength; ++j) 102 for (int j = 1; j < bodyLength; ++j)
114 newBodyAtoms[j] = j <= i ? bodyAtoms[j - 1] : bodyAtoms[j]; 103 newBodyAtoms[j] = j <= i ? bodyAtoms[j - 1] : bodyAtoms[j];
115 104
116 tClause = DLClause.create(new Atom[] {getNegativeAtom(bodyAtoms[i])}, newBodyAtoms); 105 tClause = DLClause.create(new Atom[]{getNegativeAtom(bodyAtoms[i])}, newBodyAtoms);
117// addDatalogRule(tClause); 106// addDatalogRule(tClause);
118 newClauses.add(tClause); 107 newClauses.add(tClause);
119 } 108 }
120 } 109 }
121 else if (p instanceof AtLeastConcept && clause.getBodyLength() == 1 && clause.getBodyAtom(0).getDLPredicate() instanceof AtomicConcept) { 110 else if (p instanceof AtLeastConcept && clause.getBodyLength() == 1 && clause.getBodyAtom(0).getDLPredicate() instanceof AtomicConcept) {
122 AtLeastConcept alc = (AtLeastConcept) p; 111 AtLeastConcept alc = (AtLeastConcept) p;
123 AtomicConcept ac = norm.getLeftAuxiliaryConcept(alc, true); 112 AtomicConcept ac = norm.getLeftAuxiliaryConcept(alc, true);
124 if (ac != null) { 113 if (ac != null) {
125 Atom bodyAtom = clause.getBodyAtom(0); 114 Atom bodyAtom = clause.getBodyAtom(0);
126// addDatalogRule(DLClause.create(new Atom[] {getNegativeAtom(bodyAtom)}, 115// addDatalogRule(DLClause.create(new Atom[] {getNegativeAtom(bodyAtom)},
127// new Atom[] {getNegativeAtom(Atom.create(ac, bodyAtom.getArgument(0)))} )); 116// new Atom[] {getNegativeAtom(Atom.create(ac, bodyAtom.getArgument(0)))} ));
128 newClauses.add(DLClause.create(new Atom[] {getNegativeAtom(bodyAtom)}, 117 newClauses.add(DLClause.create(new Atom[]{getNegativeAtom(bodyAtom)},
129 new Atom[] {getNegativeAtom(Atom.create(ac, bodyAtom.getArgument(0)))} )); 118 new Atom[]{getNegativeAtom(Atom.create(ac, bodyAtom.getArgument(0)))}));
130 } 119 }
131 } 120 }
132 } 121 }
133 return newClauses; 122 return newClauses;
134 } 123 }
135 124
125 private void addNegativeDatalogRules() {
126 Collection<DLClause> allRules = new LinkedList<DLClause>(rules);
127 allRules.addAll(constraints);
128 for (DLClause clause : allRules) {
129 for (DLClause newClause : addAdditionalDatalogRules(clause, m_bottom, norm))
130 addDatalogRule(newClause);
131 }
132 allRules.clear();
133 }
134
136 public Normalisation getNormalisation() { 135 public Normalisation getNormalisation() {
137 return norm; 136 return norm;
138 } 137 }
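Note (illustration, not from this commit): the headLength > 1 branch of addAdditionalDatalogRules above implements the shifting named in the updated comment. For a disjunctive rule with head A(x) v B(x) and body C(x), it emits one datalog rule per disjunct, moving the negation concepts of the remaining disjuncts into the body: A(x) <- C(x), neg_B(x) and B(x) <- C(x), neg_A(x), where neg_A and neg_B stand for the auxiliary negation atoms produced by getNegativeAtom.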
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java
index 3917efc..244eb7a 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java
@@ -18,41 +18,34 @@ import uk.ac.ox.cs.pagoda.util.Utility;
18import java.util.*; 18import java.util.*;
19 19
20public abstract class Pick4NegativeConcept implements Treatment { 20public abstract class Pick4NegativeConcept implements Treatment {
21 21
22 MultiStageQueryEngine engine; 22 public Set<Atom> addedGroundAtoms = new HashSet<Atom>();
23 MultiStageUpperProgram program; 23 MultiStageQueryEngine engine;
24 RDFoxTripleManager tripleManager; 24 MultiStageUpperProgram program;
25 RDFoxTripleManager tripleManager;
25 PredicateDependency dependencyGraph; 26 PredicateDependency dependencyGraph;
26 boolean addGap = false; 27 boolean addGap = false;
27 28
28 public Pick4NegativeConcept(MultiStageQueryEngine store, MultiStageUpperProgram multiProgram) { 29 public Pick4NegativeConcept(MultiStageQueryEngine store, MultiStageUpperProgram multiProgram) {
29 this.engine = store; 30 this.engine = store;
30 this.program = multiProgram; 31 this.program = multiProgram;
31 this.tripleManager = new RDFoxTripleManager(store.getDataStore(), true); 32 this.tripleManager = new RDFoxTripleManager(store.getDataStore(), true);
32 } 33 }
33 34
34 void addTripleByID(Atom atom, Atom gapAtom, Map<Variable, Integer> assignment) { 35 void addTripleByID(Atom atom, Atom gapAtom, Map<Variable, Integer> assignment) {
35 int[] newTuple = tripleManager.getInstance(atom, assignment); 36 int[] newTuple = tripleManager.getInstance(atom, assignment);
36 tripleManager.addTripleByID(newTuple); 37 tripleManager.addTripleByID(newTuple);
37 if (addGap) 38 if (addGap)
38 tripleManager.addTripleByID(tripleManager.getInstance(gapAtom, assignment)); 39 tripleManager.addTripleByID(tripleManager.getInstance(gapAtom, assignment));
39 } 40 }
40 41
41 public Set<Atom> addedGroundAtoms = new HashSet<Atom>();
42
43 protected boolean makeSatisfied(Violation violation, Comparator<Atom> comp) { 42 protected boolean makeSatisfied(Violation violation, Comparator<Atom> comp) {
44 LinkedList<AnswerTupleID> tuples = violation.getTuples(); 43 LinkedList<AnswerTupleID> tuples = violation.getTuples();
45 DLClause constraint = violation.getConstraint(); 44 DLClause constraint = violation.getConstraint();
46 Map<Variable, Integer> assignment = new HashMap<Variable, Integer>(); 45 Map<Variable, Integer> assignment = new HashMap<Variable, Integer>();
47 46
48 if (constraint.getHeadLength() > 1) { 47 if (constraint.getHeadLength() > 1) {
49 Atom[] orderedAtoms = new Atom[constraint.getHeadLength()]; 48 Atom[] orderedAtoms = Arrays.copyOf(constraint.getHeadAtoms(), constraint.getHeadLength());
50 int index = 0;
51
52 for (Atom headAtom: constraint.getHeadAtoms()) {
53 orderedAtoms[index++] = headAtom;
54 }
55
56 Arrays.sort(orderedAtoms, comp); 49 Arrays.sort(orderedAtoms, comp);
57 50
58 Set<AnswerTupleID> negTuples = new HashSet<AnswerTupleID>(); 51 Set<AnswerTupleID> negTuples = new HashSet<AnswerTupleID>();
@@ -86,10 +79,9 @@ public abstract class Pick4NegativeConcept implements Treatment {
86 AnswerTupleID lastAdded = null; 79 AnswerTupleID lastAdded = null;
87 80
88 for (Iterator<AnswerTupleID> iter = tuples.iterator(); iter.hasNext(); ) { 81 for (Iterator<AnswerTupleID> iter = tuples.iterator(); iter.hasNext(); ) {
89 82
90 AnswerTupleID tuple = iter.next(); 83 AnswerTupleID tuple = iter.next();
91 if (negTuples.contains(MultiStageUpperProgram.project(tuple, violation.getVariables(), subVars))) ; 84 if (!negTuples.contains(MultiStageUpperProgram.project(tuple, violation.getVariables(), subVars))) {
92 else {
93 if (lastAdded == null || tComp.compare(lastAdded, tuple) != 0) { 85 if (lastAdded == null || tComp.compare(lastAdded, tuple) != 0) {
94 lastAdded = tuple; 86 lastAdded = tuple;
95 tuple.getAssignment(violation.getVariables(), assignment); 87 tuple.getAssignment(violation.getVariables(), assignment);
@@ -103,29 +95,26 @@ public abstract class Pick4NegativeConcept implements Treatment {
103 if (tuples.isEmpty()) 95 if (tuples.isEmpty())
104 return true; 96 return true;
105 } 97 }
106 98 if (!tuples.isEmpty()) return false;
107 if (!tuples.isEmpty()) return false;
108
109 } 99 }
110 else { 100 else {
111 Set<Atom> headAtoms = new HashSet<Atom>(); 101 Set<Atom> headAtoms = new HashSet<Atom>();
112 for (DLClause clause: program.convertExist(constraint, violation.getClause())) { 102 for (DLClause clause : program.convertExist(constraint, violation.getClause(), violation.getTuples())) {
113 if (DLClauseHelper.hasSubsetBodyAtoms(clause, constraint)) { 103
114 Atom tHeadAtom = clause.getHeadAtom(0); 104 if (!DLClauseHelper.hasSubsetBodyAtoms(clause, constraint)) {
115 Atom tGapHeadAtom = addGap ? getGapAtom(tHeadAtom) : null; 105 Utility.logError("There might be an error here... Cannot happen!!!");
116 if (DLClauseHelper.isGround(tHeadAtom)) { 106 throw new Error("This condition should not happen!!!");
117 if (!addedGroundAtoms.contains(tHeadAtom)) {
118 program.addUpdatedPredicate(tHeadAtom.getDLPredicate());
119 addTripleByID(tHeadAtom, tGapHeadAtom, null);
120 addedGroundAtoms.add(tHeadAtom);
121 }
122 }
123 else headAtoms.add(tHeadAtom);
124 }
125 else {
126 Utility.logError("There might be an error here... Can't happend!!!");
127 throw new Error("This condition should not happen!");
128 } 107 }
108
109 Atom tHeadAtom = clause.getHeadAtom(0);
110 Atom tGapHeadAtom = addGap ? getGapAtom(tHeadAtom) : null;
111 if (DLClauseHelper.isGround(tHeadAtom)) {
112 if (!addedGroundAtoms.contains(tHeadAtom)) {
113 program.addUpdatedPredicate(tHeadAtom.getDLPredicate());
114 addTripleByID(tHeadAtom, tGapHeadAtom, null);
115 addedGroundAtoms.add(tHeadAtom);
116 }
117 } else headAtoms.add(tHeadAtom);
129 } 118 }
130 if (!tuples.isEmpty()) 119 if (!tuples.isEmpty())
131 for (Atom atom: headAtoms) 120 for (Atom atom: headAtoms)
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
index 7999daa..3c0a001 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
@@ -44,31 +44,36 @@ public class MyQueryReasoner extends QueryReasoner {
44 44
45 boolean equalityTag; 45 boolean equalityTag;
46 boolean multiStageTag; 46 boolean multiStageTag;
47 47 TrackingRuleEncoder encoder;
48 Timer t = new Timer();
49 private Collection<String> predicatesWithGap = null;
50 private Boolean satisfiable;
51 private ConsistencyManager consistency = new ConsistencyManager(this);
52
48 public MyQueryReasoner() { 53 public MyQueryReasoner() {
49 setup(true, true); 54 setup(true, true);
50 } 55 }
51 56
52 public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) { 57 public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) {
53 setup(multiStageTag, considerEqualities); 58 setup(multiStageTag, considerEqualities);
54 } 59 }
55 60
56 private BasicQueryEngine getUpperStore(String name, boolean checkValidity) { 61 private BasicQueryEngine getUpperStore(String name, boolean checkValidity) {
57 if (multiStageTag) 62 if (multiStageTag)
58 return new MultiStageQueryEngine(name, checkValidity); 63 return new MultiStageQueryEngine(name, checkValidity);
59// return new TwoStageQueryEngine(name, checkValidity); 64// return new TwoStageQueryEngine(name, checkValidity);
60 else 65 else
61 return new BasicQueryEngine(name); 66 return new BasicQueryEngine(name);
62 } 67 }
63 68
64 public void setup(boolean multiStageTag, boolean considerEqualities) { 69 public void setup(boolean multiStageTag, boolean considerEqualities) {
65 satisfiable = null; 70 satisfiable = null;
66 this.multiStageTag = multiStageTag; 71 this.multiStageTag = multiStageTag;
67 this.equalityTag = considerEqualities; 72 this.equalityTag = considerEqualities;
68 73
69 rlLowerStore = new BasicQueryEngine("rl-lower-bound"); 74 rlLowerStore = new BasicQueryEngine("rl-lower-bound");
70 elLowerStore = new KarmaQueryEngine("elho-lower-bound"); 75 elLowerStore = new KarmaQueryEngine("elho-lower-bound");
71 76
72 trackingStore = getUpperStore("tracking", false); 77 trackingStore = getUpperStore("tracking", false);
73 } 78 }
74 79
@@ -80,113 +85,108 @@ public class MyQueryReasoner extends QueryReasoner {
80 elLowerStore.importRDFData(name, datafile); 85 elLowerStore.importRDFData(name, datafile);
81 trackingStore.importRDFData(name, datafile); 86 trackingStore.importRDFData(name, datafile);
82 } 87 }
83 88
84 @Override 89 @Override
85 public void loadOntology(OWLOntology o) { 90 public void loadOntology(OWLOntology o) {
86 if (!equalityTag) { 91 if (!equalityTag) {
87 EqualitiesEliminator eliminator = new EqualitiesEliminator(o); 92 EqualitiesEliminator eliminator = new EqualitiesEliminator(o);
88 o = eliminator.getOutputOntology(); 93 o = eliminator.getOutputOntology();
89 eliminator.save(); 94 eliminator.save();
90 } 95 }
91 96
92 ontology = o; 97 ontology = o;
93 program = new DatalogProgram(ontology, properties.getToClassify()); 98 program = new DatalogProgram(ontology, properties.getToClassify());
94// program.getLower().save(); 99// program.getLower().save();
95// program.getUpper().save(); 100// program.getUpper().save();
96// program.getGeneral().save(); 101// program.getGeneral().save();
97 102
98 if (multiStageTag && !program.getGeneral().isHorn()) { 103 if (multiStageTag && !program.getGeneral().isHorn()) {
99 lazyUpperStore = getUpperStore("lazy-upper-bound", true); // new MultiStageQueryEngine("lazy-upper-bound", true); // 104 lazyUpperStore = getUpperStore("lazy-upper-bound", true); // new MultiStageQueryEngine("lazy-upper-bound", true); //
100 } 105 }
101 106
107 // TODO add new upper store creation
108
102 importData(program.getAdditionalDataFile()); 109 importData(program.getAdditionalDataFile());
103 110
104 elho_ontology = new ELHOProfile().getFragment(ontology); 111 elho_ontology = new ELHOProfile().getFragment(ontology);
105 elLowerStore.processOntology(elho_ontology); 112 elLowerStore.processOntology(elho_ontology);
106 } 113 }
107
108 private Collection<String> predicatesWithGap = null;
109 114
110 public Collection<String> getPredicatesWithGap() { 115 public Collection<String> getPredicatesWithGap() {
111 return predicatesWithGap; 116 return predicatesWithGap;
112 } 117 }
113 118
114 @Override 119 @Override
115 public boolean preprocess() { 120 public boolean preprocess() {
116 t.reset(); 121 t.reset();
117 Utility.logInfo("Preprocessing ... checking satisfiability ... "); 122 Utility.logInfo("Preprocessing ... checking satisfiability ... ");
118 123
119 String name = "data", datafile = importedData.toString(); 124 String name = "data", datafile = importedData.toString();
120 rlLowerStore.importRDFData(name, datafile); 125 rlLowerStore.importRDFData(name, datafile);
121 rlLowerStore.materialise("lower program", program.getLower().toString()); 126 rlLowerStore.materialise("lower program", program.getLower().toString());
122// program.getLower().save(); 127// program.getLower().save();
123 if (!consistency.checkRLLowerBound()) return false; 128 if (!consistency.checkRLLowerBound()) return false;
124 Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber()); 129 Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber());
125 130
126 String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); 131 String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology);
127 132
128 elLowerStore.importRDFData(name, datafile); 133 elLowerStore.importRDFData(name, datafile);
129 elLowerStore.materialise("saturate named individuals", originalMarkProgram); 134 elLowerStore.materialise("saturate named individuals", originalMarkProgram);
130 elLowerStore.materialise("lower program", program.getLower().toString()); 135 elLowerStore.materialise("lower program", program.getLower().toString());
131 elLowerStore.initialiseKarma(); 136 elLowerStore.initialiseKarma();
132 if (!consistency.checkELLowerBound()) return false; 137 if (!consistency.checkELLowerBound()) return false;
133 138
134 if (lazyUpperStore != null) { 139 if (lazyUpperStore != null) {
135 lazyUpperStore.importRDFData(name, datafile); 140 lazyUpperStore.importRDFData(name, datafile);
136 lazyUpperStore.materialise("saturate named individuals", originalMarkProgram); 141 lazyUpperStore.materialise("saturate named individuals", originalMarkProgram);
137 int tag = lazyUpperStore.materialiseRestrictedly(program, null); 142 int tag = lazyUpperStore.materialiseRestrictedly(program, null);
138 if (tag != 1) { 143 if (tag != 1) {
139 lazyUpperStore.dispose(); 144 lazyUpperStore.dispose();
140 lazyUpperStore = null; 145 lazyUpperStore = null;
141 } 146 }
142 if (tag == -1) return false; 147 if (tag == -1) return false;
143 } 148 }
144 if (consistency.checkLazyUpper()) { 149 if (consistency.checkLazyUpper()) {
145 satisfiable = true; 150 satisfiable = true;
146 Utility.logInfo("time for satisfiability checking: " + t.duration()); 151 Utility.logInfo("time for satisfiability checking: " + t.duration());
147 } 152 }
148 153
154 // TODO add new upper store preprocessing
155
149 trackingStore.importRDFData(name, datafile); 156 trackingStore.importRDFData(name, datafile);
150 trackingStore.materialise("saturate named individuals", originalMarkProgram); 157 trackingStore.materialise("saturate named individuals", originalMarkProgram);
151 158
152// materialiseFullUpper(); 159// materialiseFullUpper();
153 GapByStore4ID gap = new GapByStore4ID(trackingStore); 160 GapByStore4ID gap = new GapByStore4ID(trackingStore);
154 trackingStore.materialiseFoldedly(program, gap); 161 trackingStore.materialiseFoldedly(program, gap);
155 predicatesWithGap = gap.getPredicatesWithGap(); 162 predicatesWithGap = gap.getPredicatesWithGap();
156 gap.clear(); 163 gap.clear();
157 164
158 if (program.getGeneral().isHorn()) 165 if (program.getGeneral().isHorn())
159 encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore); 166 encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore);
160 else 167 else
161 encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore); 168 encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore);
162// encoder = new TrackingRuleEncoderDisj1(program.getUpper(), trackingStore); 169// encoder = new TrackingRuleEncoderDisj1(program.getUpper(), trackingStore);
163// encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore); 170// encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore);
164// encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore); 171// encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore);
165 172
166 program.deleteABoxTurtleFile(); 173 program.deleteABoxTurtleFile();
167 174
168 if (!isConsistent()) 175 if (!isConsistent())
169 return false; 176 return false;
170 177
171 consistency.extractBottomFragment(); 178 consistency.extractBottomFragment();
172 return true; 179 return true;
173 } 180 }
174 181
175 private Boolean satisfiable;
176 private ConsistencyManager consistency = new ConsistencyManager(this);
177
178 TrackingRuleEncoder encoder;
179
180 @Override 182 @Override
181 public boolean isConsistent() { 183 public boolean isConsistent() {
182 if (satisfiable == null) { 184 if (satisfiable == null) {
183 satisfiable = consistency.check(); 185 satisfiable = consistency.check();
184 Utility.logInfo("time for satisfiability checking: " + t.duration()); 186 Utility.logInfo("time for satisfiability checking: " + t.duration());
185 } 187 }
186 return satisfiable; 188 return satisfiable;
187 } 189 }
188
189 Timer t = new Timer();
190 190
191 private OWLOntology relevantPart(QueryRecord queryRecord) { 191 private OWLOntology relevantPart(QueryRecord queryRecord) {
192 AnswerTuples rlAnswer = null, elAnswer = null; 192 AnswerTuples rlAnswer = null, elAnswer = null;
@@ -209,22 +209,29 @@ public class MyQueryReasoner extends QueryReasoner {
209 209
210 queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); 210 queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables());
211 211
212 // TODO log correct partial answers
213// Utility.logDebug(toJson("upperBound1", queryRecord));
214 if (!queryRecord.processed() && !queryRecord.getQueryText().equals(extendedQuery[0])) { 212 if (!queryRecord.processed() && !queryRecord.getQueryText().equals(extendedQuery[0])) {
215 queryUpperBound(upperStore, queryRecord, extendedQuery[0], queryRecord.getAnswerVariables()); 213 queryUpperBound(upperStore, queryRecord, extendedQuery[0], queryRecord.getAnswerVariables());
216// Utility.logDebug(toJson("upperBound2", queryRecord));
217 } 214 }
218 if (!queryRecord.processed() && queryRecord.hasNonAnsDistinguishedVariables()) { 215 if (!queryRecord.processed() && queryRecord.hasNonAnsDistinguishedVariables()) {
219 queryUpperBound(upperStore, queryRecord, extendedQuery[1], queryRecord.getDistinguishedVariables()); 216 queryUpperBound(upperStore, queryRecord, extendedQuery[1], queryRecord.getDistinguishedVariables());
220// Utility.logDebug(toJson("upperBound3", queryRecord));
221 } 217 }
222 218
219 Utility.logDebug(toJsonKeyValuePair("upperBound1", queryRecord));
220
221	        // TODO check whether it is harmful; if it is not, implement it properly
222 // BEGIN: trying to intersect
223 if (!queryRecord.isBottom() && lazyUpperStore != null) {
224 queryUpperBound(trackingStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables());
225 }
226 // END: trying to intersect
227
223 queryRecord.addProcessingTime(Step.UpperBound, t.duration()); 228 queryRecord.addProcessingTime(Step.UpperBound, t.duration());
224 if (queryRecord.processed()) { 229 if (queryRecord.processed()) {
225 queryRecord.setDifficulty(Step.UpperBound); 230 queryRecord.setDifficulty(Step.UpperBound);
226 return null; 231 return null;
227 } 232 }
233
234	        // TODO add evaluation of the new upper store
228 235
229 t.reset(); 236 t.reset();
230 try { 237 try {
@@ -280,7 +287,7 @@ public class MyQueryReasoner extends QueryReasoner {
280 287
281// int counter = 0; 288// int counter = 0;
282 289
283 private String toJson(String key, Object value) { 290 private String toJsonKeyValuePair(String key, Object value) {
284 HashMap<String, Object> map = new HashMap<>(); 291 HashMap<String, Object> map = new HashMap<>();
285 map.put(key, value); 292 map.put(key, value);
286 return QueryRecord.GsonCreator.getInstance().toJson(map); 293 return QueryRecord.GsonCreator.getInstance().toJson(map);
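Note on the block marked "trying to intersect" in relevantPart above: when a lazy upper store is in use, the query is additionally evaluated against the tracking store, with the apparent intent of combining the two upper bounds; how QueryRecord merges the answer sets is not shown in this diff. A minimal sketch of the underlying idea, using hypothetical names (UpperBoundIntersection, intersect) that are not part of PAGODA: each store yields a sound over-approximation of the certain answers, so their intersection is still sound and can only be tighter.

    import java.util.HashSet;
    import java.util.Set;

    final class UpperBoundIntersection {
        // Returns the intersection of two over-approximated answer sets.
        static <T> Set<T> intersect(Set<T> lazyUpperAnswers, Set<T> trackingAnswers) {
            Set<T> result = new HashSet<>(lazyUpperAnswers);
            result.retainAll(trackingAnswers);
            return result; // still an upper bound, and no larger than either input
        }
    }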
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/ExistConstantApproximator.java b/src/uk/ac/ox/cs/pagoda/rules/ExistConstantApproximator.java
index 5df66bc..74c531f 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/ExistConstantApproximator.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/ExistConstantApproximator.java
@@ -1,14 +1,14 @@
1package uk.ac.ox.cs.pagoda.multistage; 1package uk.ac.ox.cs.pagoda.rules;
2 2
3import org.semanticweb.HermiT.model.DLClause; 3import org.semanticweb.HermiT.model.DLClause;
4import uk.ac.ox.cs.pagoda.rules.OverApproxExist; 4import uk.ac.ox.cs.pagoda.multistage.AnswerTupleID;
5 5
6import java.util.Collection; 6import java.util.Collection;
7 7
8/** 8/**
9 * A wrapper for <tt>OverApproxExist</tt>. 9 * A wrapper for <tt>OverApproxExist</tt>.
10 * */ 10 * */
11public class ExistConstantApproximator implements ExistApproximator { 11public class ExistConstantApproximator implements TupleDependentApproximator {
12 12
13 private final OverApproxExist overApproxExist; 13 private final OverApproxExist overApproxExist;
14 14
diff --git a/src/uk/ac/ox/cs/pagoda/rules/LimitedSkolemisationApproximator.java b/src/uk/ac/ox/cs/pagoda/rules/LimitedSkolemisationApproximator.java
new file mode 100644
index 0000000..284edd2
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/rules/LimitedSkolemisationApproximator.java
@@ -0,0 +1,72 @@
1package uk.ac.ox.cs.pagoda.rules;
2
3import org.semanticweb.HermiT.model.DLClause;
4import uk.ac.ox.cs.pagoda.multistage.AnswerTupleID;
5
6import java.util.*;
7
8/**
9 * Approximates existential rules by a limited form of Skolemisation.
10 * <p>
11 * Given a rule and a grounding substitution,
12 * it Skolemises the rule if
13 * all the terms in the substitution have depth below a given maximum depth,
14 * otherwise it approximates using an alternative <tt>TupleDependentApproximator</tt>.
15 *
16 * */
17public class LimitedSkolemisationApproximator implements TupleDependentApproximator {
18
19 private final int maxTermDepth;
20 private final TupleDependentApproximator alternativeApproximator;
21
22 private Map<AnswerTupleID, Integer> mapIndividualsToDepth;
23
24 public LimitedSkolemisationApproximator(int maxTermDepth) {
25 this(maxTermDepth, new ExistConstantApproximator());
26 }
27
28 public LimitedSkolemisationApproximator(int maxTermDepth, TupleDependentApproximator alternativeApproximator) {
29 this.maxTermDepth = maxTermDepth;
30 this.alternativeApproximator = alternativeApproximator;
31 this.mapIndividualsToDepth = new HashMap<>();
32 }
33
34 @Override
35 public Collection<DLClause> convert(DLClause clause, DLClause originalClause, Collection<AnswerTupleID> violationTuples) {
36 switch (clause.getHeadLength()) {
37 case 1:
38 return overApprox(clause, originalClause, violationTuples);
39 case 0:
40 return Arrays.asList(clause);
41 default:
42 ArrayList<DLClause> result = new ArrayList<>();
43	            // TODO implement handling of clauses with disjunctive (multi-atom) heads
44 return result;
45 }
46
47
48 }
49
50 private Collection<DLClause> overApprox(DLClause clause, DLClause originalClause, Collection<AnswerTupleID> violationTuples) {
51 ArrayList<DLClause> result = new ArrayList<>();
52
53 for (AnswerTupleID violationTuple : violationTuples)
54 if (getDepth(violationTuple) > maxTermDepth)
55 result.addAll(alternativeApproximator.convert(clause, originalClause, null));
56 else
57 result.add(getInstantiatedSkolemisation(clause, originalClause, violationTuple));
58
59 return result;
60 }
61
62
63 private DLClause getInstantiatedSkolemisation(DLClause clause, DLClause originalClause, AnswerTupleID violationTuple) {
64	        // TODO implement: ground the clause using the violation tuple and Skolemise the existential head
65 return null;
66 }
67
68 private int getDepth(AnswerTupleID violationTuple) {
69 if (!mapIndividualsToDepth.containsKey(violationTuple)) return 0;
70 return mapIndividualsToDepth.get(violationTuple);
71 }
72}
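LimitedSkolemisationApproximator decides, per violation tuple, between genuine Skolemisation (all terms still within the depth bound) and the fallback approximator; getInstantiatedSkolemisation itself is still a stub. The following is a minimal sketch, not the PAGODA implementation, of the Skolemisation step for the simplest head shape (an AtLeastConcept over an atomic role and atomic concept). Grounding the body with the violation tuple is omitted because the AnswerTupleID API is not shown in this diff, and the two-argument Atom.create overload is assumed. Note also that the stub returns a single DLClause, whereas the Skolemised reading naturally yields one Horn clause per conjunct, so the sketch returns a collection.

    import org.semanticweb.HermiT.model.*;
    import uk.ac.ox.cs.pagoda.rules.OverApproxExist;

    import java.util.*;

    final class SkolemisationSketch {
        static Collection<DLClause> skolemise(DLClause clause, DLClause originalClause) {
            Atom headAtom = clause.getHeadAtom(0);
            if (!(headAtom.getDLPredicate() instanceof AtLeastConcept))
                return Collections.singletonList(clause);
            AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate();
            if (!(alc.getOnRole() instanceof AtomicRole) || !(alc.getToConcept() instanceof AtomicConcept))
                return Collections.singletonList(clause); // only the simplest pattern is sketched here
            Term x = headAtom.getArgument(0);
            Individual skolem = OverApproxExist.getNewIndividual(originalClause, 0);
            List<DLClause> result = new ArrayList<>();
            // body -> R(x, skolem) and body -> C(skolem): one fresh individual for this rule application
            result.add(DLClause.create(new Atom[] {Atom.create((AtomicRole) alc.getOnRole(), x, skolem)}, clause.getBodyAtoms()));
            result.add(DLClause.create(new Atom[] {Atom.create((AtomicConcept) alc.getToConcept(), skolem)}, clause.getBodyAtoms()));
            return result;
        }
    }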
diff --git a/src/uk/ac/ox/cs/pagoda/rules/OverApproxExist.java b/src/uk/ac/ox/cs/pagoda/rules/OverApproxExist.java
index 2ff1673..1099acf 100644
--- a/src/uk/ac/ox/cs/pagoda/rules/OverApproxExist.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/OverApproxExist.java
@@ -8,87 +8,67 @@ import java.util.*;
8 8
9public class OverApproxExist implements Approximator { 9public class OverApproxExist implements Approximator {
10 10
11 @Override 11 public static final String negativeSuffix = "_neg";
12 public Collection<DLClause> convert(DLClause clause, DLClause originalClause) { 12 public static final String skolemisedIndividualPrefix = Namespace.PAGODA_ANONY + "individual";
13 Collection<DLClause> ret; 13 private static final Variable X = Variable.create("X");
14 switch (clause.getHeadLength()) { 14 private static int individualCounter = 0;
15 case 1: 15 private static Map<DLClause, Integer> individualNumber = new HashMap<DLClause, Integer>();
16 return overApprox(clause.getHeadAtom(0), clause.getBodyAtoms(), originalClause); 16
17 case 0:
18 ret = new LinkedList<DLClause>();
19 ret.add(clause);
20 return ret;
21 default:
22 ret = new LinkedList<DLClause>();
23 for (Iterator<DLClause> iter = new DisjunctiveHeads(clause, originalClause); iter.hasNext(); )
24 ret.add(iter.next());
25 return ret;
26 }
27 }
28
29 private static int noOfExistential(DLClause originalClause) { 17 private static int noOfExistential(DLClause originalClause) {
30 int no = 0; 18 int no = 0;
31 for (Atom atom: originalClause.getHeadAtoms()) 19 for (Atom atom: originalClause.getHeadAtoms())
32 if (atom.getDLPredicate() instanceof AtLeast) 20 if (atom.getDLPredicate() instanceof AtLeast)
33 no += ((AtLeast) atom.getDLPredicate()).getNumber(); 21 no += ((AtLeast) atom.getDLPredicate()).getNumber();
34 return no; 22 return no;
35 } 23 }
36 24
37 private static int indexOfExistential(Atom headAtom, DLClause originalClause) { 25 private static int indexOfExistential(Atom headAtom, DLClause originalClause) {
38 if (!(headAtom.getDLPredicate() instanceof AtLeast)) return -1; 26 if (!(headAtom.getDLPredicate() instanceof AtLeast)) return -1;
39 AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate(); 27 AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate();
40 if (alc.getToConcept() instanceof AtomicConcept) { 28 if (alc.getToConcept() instanceof AtomicConcept) {
41 AtomicConcept ac = (AtomicConcept) alc.getToConcept(); 29 AtomicConcept ac = (AtomicConcept) alc.getToConcept();
42 if (ac.getIRI().endsWith(negativeSuffix)) { 30 if (ac.getIRI().endsWith(negativeSuffix)) {
43 alc = AtLeastConcept.create(alc.getNumber(), alc.getOnRole(), AtomicNegationConcept.create(getNegationConcept(ac))); 31 alc = AtLeastConcept.create(alc.getNumber(), alc.getOnRole(), AtomicNegationConcept.create(getNegationConcept(ac)));
44 headAtom = Atom.create(alc, headAtom.getArgument(0)); 32 headAtom = Atom.create(alc, headAtom.getArgument(0));
45 } 33 }
46 } 34 }
47 35
48 int index = 0; 36 int index = 0;
49 for (Atom atom: originalClause.getHeadAtoms()) { 37 for (Atom atom: originalClause.getHeadAtoms()) {
50 if (atom.equals(headAtom)) 38 if (atom.equals(headAtom))
51 return index; 39 return index;
52 if (atom.getDLPredicate() instanceof AtLeast) 40 if (atom.getDLPredicate() instanceof AtLeast)
53 index += ((AtLeast) atom.getDLPredicate()).getNumber(); 41 index += ((AtLeast) atom.getDLPredicate()).getNumber();
54 } 42 }
55 return -1; 43 return -1;
56 } 44 }
57
58 private static final Variable X = Variable.create("X");
59 public static final String negativeSuffix = "_neg";
60 45
61 public static AtomicConcept getNegationConcept(DLPredicate p) { 46 public static AtomicConcept getNegationConcept(DLPredicate p) {
62 if (p.equals(AtomicConcept.THING)) return AtomicConcept.NOTHING; 47 if (p.equals(AtomicConcept.THING)) return AtomicConcept.NOTHING;
63 if (p.equals(AtomicConcept.NOTHING)) return AtomicConcept.THING; 48 if (p.equals(AtomicConcept.NOTHING)) return AtomicConcept.THING;
64 49
65 if (p instanceof AtomicConcept) { 50 if (p instanceof AtomicConcept) {
66 String iri = ((AtomicConcept) p).getIRI(); 51 String iri = ((AtomicConcept) p).getIRI();
67 if (iri.endsWith(negativeSuffix)) 52 if (iri.endsWith(negativeSuffix))
68 iri = iri.substring(0, iri.length() - 4); 53 iri = iri.substring(0, iri.length() - 4);
69 else 54 else
70 iri += negativeSuffix; 55 iri += negativeSuffix;
71 56
72 return AtomicConcept.create(iri); 57 return AtomicConcept.create(iri);
73 } 58 }
74 if (p instanceof AtLeastConcept) { 59 if (p instanceof AtLeastConcept) {
75 // FIXME !!! here 60 // FIXME !!! here
76 return null; 61 return null;
77 } 62 }
78 return null; 63 return null;
79 } 64 }
80 65
81 public static final String skolemisedIndividualPrefix = Namespace.PAGODA_ANONY + "individual";
82
83 private static int individualCounter = 0;
84 private static Map<DLClause, Integer> individualNumber = new HashMap<DLClause, Integer>();
85
86 public static int getNumberOfSkolemisedIndividual() { 66 public static int getNumberOfSkolemisedIndividual() {
87 return individualCounter; 67 return individualCounter;
88 } 68 }
89 69
90 public static Individual getNewIndividual(DLClause originalClause, int offset) { 70 public static Individual getNewIndividual(DLClause originalClause, int offset) {
91 Individual ret; 71 Individual ret;
92 if (individualNumber.containsKey(originalClause)) { 72 if (individualNumber.containsKey(originalClause)) {
93 ret = Individual.create(skolemisedIndividualPrefix + (individualNumber.get(originalClause) + offset)); 73 ret = Individual.create(skolemisedIndividualPrefix + (individualNumber.get(originalClause) + offset));
94 } 74 }
@@ -97,16 +77,34 @@ public class OverApproxExist implements Approximator {
97 ret = Individual.create(skolemisedIndividualPrefix + (individualCounter + offset)); 77 ret = Individual.create(skolemisedIndividualPrefix + (individualCounter + offset));
98 individualCounter += noOfExistential(originalClause); 78 individualCounter += noOfExistential(originalClause);
99 } 79 }
100 return ret; 80 return ret;
101 } 81 }
102 82
103 public static int indexOfSkolemisedIndividual(Atom atom) { 83 public static int indexOfSkolemisedIndividual(Atom atom) {
104 Term t; 84 Term t;
105 for (int index = 0; index < atom.getArity(); ++index) { 85 for (int index = 0; index < atom.getArity(); ++index) {
106 t = atom.getArgument(index); 86 t = atom.getArgument(index);
107 if (t instanceof Individual && ((Individual) t).getIRI().contains(skolemisedIndividualPrefix)) return index; 87 if (t instanceof Individual && ((Individual) t).getIRI().contains(skolemisedIndividualPrefix)) return index;
108 } 88 }
109 return -1; 89 return -1;
90 }
91
92 @Override
93 public Collection<DLClause> convert(DLClause clause, DLClause originalClause) {
94 Collection<DLClause> ret;
95 switch (clause.getHeadLength()) {
96 case 1:
97 return overApprox(clause.getHeadAtom(0), clause.getBodyAtoms(), originalClause);
98 case 0:
99 ret = new LinkedList<DLClause>();
100 ret.add(clause);
101 return ret;
102 default:
103 ret = new LinkedList<DLClause>();
104 for (Iterator<DLClause> iter = new DisjunctiveHeads(clause, originalClause); iter.hasNext(); )
105 ret.add(iter.next());
106 return ret;
107 }
110 } 108 }
111 109
112 public Collection<DLClause> overApprox(Atom headAtom, Atom[] bodyAtoms, DLClause originalClause) { 110 public Collection<DLClause> overApprox(Atom headAtom, Atom[] bodyAtoms, DLClause originalClause) {
@@ -123,7 +121,7 @@ public class OverApproxExist implements Approximator {
123 AtomicConcept atomicConcept = null; 121 AtomicConcept atomicConcept = null;
124 122
125 if (concept instanceof AtomicNegationConcept) { 123 if (concept instanceof AtomicNegationConcept) {
126 // is this already in MultiStageUpperProgram? 124 // TODO CHECK: is this already in MultiStageUpperProgram?
127 Atom atom1 = Atom.create(atomicConcept = ((AtomicNegationConcept) concept).getNegatedAtomicConcept(), X); 125 Atom atom1 = Atom.create(atomicConcept = ((AtomicNegationConcept) concept).getNegatedAtomicConcept(), X);
128 Atom atom2 = Atom.create(atomicConcept = getNegationConcept(atomicConcept), X); 126 Atom atom2 = Atom.create(atomicConcept = getNegationConcept(atomicConcept), X);
129 ret.add(DLClause.create(new Atom[0], new Atom[] {atom1, atom2})); 127 ret.add(DLClause.create(new Atom[0], new Atom[] {atom1, atom2}));
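OverApproxExist draws Skolem individual names from a global counter and keeps a per-clause map, so each original clause receives a contiguous block of names addressed by the offset argument. A small usage sketch, assuming the standard HermiT model factories (AtomicRole.create, AtLeastConcept.create) and assuming the per-clause map makes repeated calls for the same clause reuse the same block of names:

    import org.semanticweb.HermiT.model.*;
    import uk.ac.ox.cs.pagoda.rules.OverApproxExist;

    public class SkolemNamingExample {
        public static void main(String[] args) {
            Variable x = Variable.create("X");
            AtomicConcept a = AtomicConcept.create("http://example.org/A");
            AtomicConcept b = AtomicConcept.create("http://example.org/B");
            AtomicRole r = AtomicRole.create("http://example.org/r");
            // A(X) -> atLeast(1, r, B)(X), i.e. an existential head encoded as an AtLeast atom
            Atom head = Atom.create(AtLeastConcept.create(1, r, b), x);
            Atom body = Atom.create(a, x);
            DLClause clause = DLClause.create(new Atom[] {head}, new Atom[] {body});

            Individual first = OverApproxExist.getNewIndividual(clause, 0);
            Individual second = OverApproxExist.getNewIndividual(clause, 0);
            // Expected to print the same IRI twice: the clause is looked up in the
            // per-clause map, so the same offset yields the same Skolem individual.
            System.out.println(first.getIRI());
            System.out.println(second.getIRI());
        }
    }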
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/ExistApproximator.java b/src/uk/ac/ox/cs/pagoda/rules/TupleDependentApproximator.java
index 0ee2035..63057c4 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/ExistApproximator.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/TupleDependentApproximator.java
@@ -1,13 +1,14 @@
1package uk.ac.ox.cs.pagoda.multistage; 1package uk.ac.ox.cs.pagoda.rules;
2 2
3import org.semanticweb.HermiT.model.DLClause; 3import org.semanticweb.HermiT.model.DLClause;
4import uk.ac.ox.cs.pagoda.multistage.AnswerTupleID;
4 5
5import java.util.Collection; 6import java.util.Collection;
6 7
7/** 8/**
8 * An approximator for existential rules. 9 * It approximates rules according to a specific instantiation of the body.
9 * */ 10 */
10public interface ExistApproximator { 11public interface TupleDependentApproximator {
11 12
12 Collection<DLClause> convert(DLClause clause, 13 Collection<DLClause> convert(DLClause clause,
13 DLClause originalClause, 14 DLClause originalClause,
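TupleDependentApproximator's convert takes, in addition to the clause and the original clause, the tuples that instantiate the body. A minimal hypothetical implementation that ignores the instantiation and leaves the clause untouched, shown only to illustrate the shape of the contract (the third parameter's declaration is truncated in the hunk above; its type is taken from LimitedSkolemisationApproximator's override):

    import org.semanticweb.HermiT.model.DLClause;
    import uk.ac.ox.cs.pagoda.multistage.AnswerTupleID;
    import uk.ac.ox.cs.pagoda.rules.TupleDependentApproximator;

    import java.util.Collection;
    import java.util.Collections;

    public class IdentityApproximator implements TupleDependentApproximator {
        @Override
        public Collection<DLClause> convert(DLClause clause, DLClause originalClause,
                                            Collection<AnswerTupleID> violationTuples) {
            // No approximation at all: return the clause unchanged.
            return Collections.singletonList(clause);
        }
    }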
diff --git a/src/uk/ac/ox/cs/pagoda/util/SparqlHelper.java b/src/uk/ac/ox/cs/pagoda/util/SparqlHelper.java
index 31838bc..1e53b9c 100644
--- a/src/uk/ac/ox/cs/pagoda/util/SparqlHelper.java
+++ b/src/uk/ac/ox/cs/pagoda/util/SparqlHelper.java
@@ -1,49 +1,18 @@
1package uk.ac.ox.cs.pagoda.util; 1package uk.ac.ox.cs.pagoda.util;
2 2
3import java.util.Collection;
4import java.util.HashSet;
5import java.util.Set;
6
7import org.semanticweb.HermiT.model.AnnotatedEquality;
8import org.semanticweb.HermiT.model.AtLeastConcept;
9import org.semanticweb.HermiT.model.Atom;
10import org.semanticweb.HermiT.model.AtomicConcept;
11import org.semanticweb.HermiT.model.AtomicDataRange;
12import org.semanticweb.HermiT.model.AtomicRole;
13import org.semanticweb.HermiT.model.Constant;
14import org.semanticweb.HermiT.model.DLPredicate;
15import org.semanticweb.HermiT.model.Equality;
16import org.semanticweb.HermiT.model.Individual;
17import org.semanticweb.HermiT.model.Inequality;
18import org.semanticweb.HermiT.model.Term;
19import org.semanticweb.HermiT.model.Variable;
20
21import uk.ac.ox.cs.pagoda.MyPrefixes;
22import uk.ac.ox.cs.pagoda.hermit.RuleHelper;
23
24import com.hp.hpl.jena.graph.Node; 3import com.hp.hpl.jena.graph.Node;
25import com.hp.hpl.jena.query.Query; 4import com.hp.hpl.jena.query.Query;
26import com.hp.hpl.jena.query.QueryFactory; 5import com.hp.hpl.jena.query.QueryFactory;
27import com.hp.hpl.jena.sparql.core.TriplePath; 6import com.hp.hpl.jena.sparql.core.TriplePath;
28import com.hp.hpl.jena.sparql.core.Var; 7import com.hp.hpl.jena.sparql.core.Var;
29import com.hp.hpl.jena.sparql.syntax.Element; 8import com.hp.hpl.jena.sparql.syntax.*;
30import com.hp.hpl.jena.sparql.syntax.ElementAssign; 9import org.semanticweb.HermiT.model.*;
31import com.hp.hpl.jena.sparql.syntax.ElementBind; 10import uk.ac.ox.cs.pagoda.MyPrefixes;
32import com.hp.hpl.jena.sparql.syntax.ElementData; 11import uk.ac.ox.cs.pagoda.hermit.RuleHelper;
33import com.hp.hpl.jena.sparql.syntax.ElementDataset; 12
34import com.hp.hpl.jena.sparql.syntax.ElementExists; 13import java.util.Collection;
35import com.hp.hpl.jena.sparql.syntax.ElementFilter; 14import java.util.HashSet;
36import com.hp.hpl.jena.sparql.syntax.ElementGroup; 15import java.util.Set;
37import com.hp.hpl.jena.sparql.syntax.ElementMinus;
38import com.hp.hpl.jena.sparql.syntax.ElementNamedGraph;
39import com.hp.hpl.jena.sparql.syntax.ElementNotExists;
40import com.hp.hpl.jena.sparql.syntax.ElementOptional;
41import com.hp.hpl.jena.sparql.syntax.ElementPathBlock;
42import com.hp.hpl.jena.sparql.syntax.ElementService;
43import com.hp.hpl.jena.sparql.syntax.ElementSubQuery;
44import com.hp.hpl.jena.sparql.syntax.ElementTriplesBlock;
45import com.hp.hpl.jena.sparql.syntax.ElementUnion;
46import com.hp.hpl.jena.sparql.syntax.ElementVisitor;
47 16
48public class SparqlHelper { 17public class SparqlHelper {
49 18
@@ -52,7 +21,7 @@ public class SparqlHelper {
52 for (int i = 0; i < atoms.length; ++i) { 21 for (int i = 0; i < atoms.length; ++i) {
53 atoms[i].getVariables(undistinguishedVars); 22 atoms[i].getVariables(undistinguishedVars);
54 } 23 }
55 int xIndex = 1; 24 int xIndex = 1;
56 while (undistinguishedVars.contains(Variable.create("X" + xIndex))) ++xIndex; 25 while (undistinguishedVars.contains(Variable.create("X" + xIndex))) ++xIndex;
57 26
58 for (String var: vars) 27 for (String var: vars)