author     RncLsn <rnc.lsn@gmail.com>    2015-08-14 19:21:26 +0100
committer  RncLsn <rnc.lsn@gmail.com>    2015-08-14 19:21:26 +0100
commit     ae9a6bad58019ef18657568e58f49459fbadc49c (patch)
tree       104d92748f150dc74c9a0f6d19f328357903857e /src
parent     1bfe7e876c16adf73a4effdbe80431c1822bbe93 (diff)
Incremental Skolemised store (not working).
Diffstat (limited to 'src')
 src/resources/pagoda.properties                                                      |  2
 src/uk/ac/ox/cs/pagoda/multistage/IncrementalMultiStageQueryEngine.java              | 12
 src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java                         | 98
 src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java               | 36
 src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptNaive.java          | 12
 src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptQuerySpecific.java  |  2
 src/uk/ac/ox/cs/pagoda/multistage/treatement/Treatment.java                          | 25
 src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java                                 | 39
 src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java                        |  9
 src/uk/ac/ox/cs/pagoda/util/ExponentialInterpolation.java                            | 33
 src/uk/ac/ox/cs/pagoda/util/tuples/TupleBuilder.java                                 | 21
 11 files changed, 242 insertions(+), 47 deletions(-)
diff --git a/src/resources/pagoda.properties b/src/resources/pagoda.properties
index dab8388..34b3d7a 100644
--- a/src/resources/pagoda.properties
+++ b/src/resources/pagoda.properties
@@ -3,7 +3,7 @@ useAlwaysSimpleUpperBound=false
 #skolemUpperBound=DISABLED
 #skolemUpperBound=BEFORE_SUMMARISATION
 skolemUpperBound=AFTER_SUMMARISATION
-skolemDepth=1
+skolemDepth=10
 toCallHermit=true
 
 statisticsDir=/home/alessandro/Dropbox/Oxford/PAGOdA/statistics
\ No newline at end of file
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/IncrementalMultiStageQueryEngine.java b/src/uk/ac/ox/cs/pagoda/multistage/IncrementalMultiStageQueryEngine.java
new file mode 100644
index 0000000..30ba9da
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/multistage/IncrementalMultiStageQueryEngine.java
@@ -0,0 +1,12 @@
+package uk.ac.ox.cs.pagoda.multistage;
+
+/***
+ * Like a <tt>MultiStageQueryEngine</tt>, except that <tt>materialiseSkolemly</tt> can be called
+ * multiple times with increasing values of <tt>maxTermDepth</tt>.
+ */
+public class IncrementalMultiStageQueryEngine extends MultiStageQueryEngine {
+
+    public IncrementalMultiStageQueryEngine(String name, boolean checkValidity) {
+        super(name, checkValidity);
+    }
+}
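
The intended call pattern, inferred from the Javadoc above and from the retry loop added to
MyQueryReasoner further down, is roughly the following sketch (the engine name, program variable
and depth bound are illustrative, not part of this commit):

    // Sketch: deepen the Skolemised materialisation one level at a time.
    // materialiseSkolemly must now be called with strictly increasing maxTermDepth,
    // otherwise it throws IllegalArgumentException.
    MultiStageQueryEngine engine = new IncrementalMultiStageQueryEngine("skolemised-store", false);
    for (int depth = 1; depth <= skolemDepthLimit; depth++) {
        int tag = engine.materialiseSkolemly(datalogProgram, null, depth);
        if (tag != 1) break;   // per MyQueryReasoner, anything other than 1 means the store cannot be used
    }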
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java
index 33f9f03..f3a78f6 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java
@@ -1,13 +1,17 @@
 package uk.ac.ox.cs.pagoda.multistage;
 
+import org.semanticweb.HermiT.model.Atom;
 import org.semanticweb.HermiT.model.DLClause;
+import org.semanticweb.HermiT.model.Individual;
 import uk.ac.ox.cs.JRDFox.JRDFStoreException;
+import uk.ac.ox.cs.JRDFox.store.DataStore;
 import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
 import uk.ac.ox.cs.pagoda.multistage.treatement.Pick4NegativeConceptNaive;
 import uk.ac.ox.cs.pagoda.multistage.treatement.Pick4NegativeConceptQuerySpecific;
 import uk.ac.ox.cs.pagoda.multistage.treatement.Treatment;
 import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
 import uk.ac.ox.cs.pagoda.query.QueryRecord;
+import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager;
 import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
 import uk.ac.ox.cs.pagoda.rules.Program;
 import uk.ac.ox.cs.pagoda.rules.approximators.SkolemTermsManager;
@@ -15,21 +19,23 @@ import uk.ac.ox.cs.pagoda.util.PagodaProperties;
 import uk.ac.ox.cs.pagoda.util.Timer;
 import uk.ac.ox.cs.pagoda.util.Utility;
 import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
+import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
+import uk.ac.ox.cs.pagoda.util.tuples.TupleBuilder;
 
 import java.io.BufferedWriter;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
+import java.util.*;
 import java.util.stream.Collectors;
 
 public class MultiStageQueryEngine extends StageQueryEngine {
 
     private HashMap<String, List> statistics = new HashMap<>();
+    private Set<Tuple<Integer>> oversizedSkolemisedFacts;
+    private RDFoxTripleManager rdFoxTripleManager;
+    private int lastMaxTermDepth = -1;
 
     public MultiStageQueryEngine(String name, boolean checkValidity) {
         super(name, checkValidity);
@@ -73,14 +79,19 @@ public class MultiStageQueryEngine extends StageQueryEngine {
     public int materialiseSkolemly(DatalogProgram dProgram, GapByStore4ID gap, int maxTermDepth) {
         if(isDisposed()) throw new DisposedException();
 
+        if(maxTermDepth <= lastMaxTermDepth)
+            throw new IllegalArgumentException("maxTermDepth must be greater than " + lastMaxTermDepth);
+        lastMaxTermDepth = maxTermDepth;
+
         materialise("lower program", dProgram.getLower().toString());
         Program generalProgram = dProgram.getGeneral();
         LimitedSkolemisationApplication program =
                 new LimitedSkolemisationApplication(generalProgram,
                                                     dProgram.getUpperBottomStrategy(),
                                                     maxTermDepth);
-        Treatment treatment = new Pick4NegativeConceptNaive(this, program);
-        return materialise(program, treatment, gap);
+        rdFoxTripleManager = new RDFoxTripleManager(store, true);
+        Treatment treatment = new Pick4NegativeConceptNaive(this, program, rdFoxTripleManager);
+        return materialise(program, treatment, gap, maxTermDepth);
     }
 
     public int materialise4SpecificQuery(Program generalProgram, QueryRecord record, BottomStrategy upperBottom) {
@@ -102,6 +113,12 @@ public class MultiStageQueryEngine extends StageQueryEngine {
     }
 
     private int materialise(MultiStageUpperProgram program, Treatment treatment, GapByStore4ID gap) {
+        return materialise(program, treatment, gap, -1);
+    }
+
+    private int materialise(MultiStageUpperProgram program, Treatment treatment, GapByStore4ID gap, int maxTermDepth) {
+        boolean actuallyCleaned = cleanStoreFromOversizedSkolemisedFacts();
+
         if(gap != null)
             treatment.addAdditionalGapTuples();
         String programName = "multi-stage upper program";
@@ -144,7 +161,7 @@ public class MultiStageQueryEngine extends StageQueryEngine {
 //            store.addRules(new String[] {datalogProgram});
             store.importRules(datalogProgram);
         }
-        store.applyReasoning(incrementally);
+        store.applyReasoning(incrementally || actuallyCleaned);
     }
 
 //        Utility.logInfo("The number of sameAs assertions in the current store: " + getSameAsNumber());
@@ -172,7 +189,7 @@ public class MultiStageQueryEngine extends StageQueryEngine {
         }
         Utility.logDebug("Time to detect violations: " + subTimer.duration());
 
-        store.makeFactsExplicit();
+//        store.makeFactsExplicit();
         subTimer.reset();
         oldTripleCount = store.getTriplesCount();
 
@@ -190,11 +207,15 @@ public class MultiStageQueryEngine extends StageQueryEngine {
                 Timer localTimer = new Timer();
                 int number = v.size();
                 long vOldCounter = store.getTriplesCount();
-                if(!treatment.makeSatisfied(v)) {
+                Set<Treatment.AtomWithIDTriple> satisfiabilityFacts;
+                if((satisfiabilityFacts = treatment.makeSatisfied(v)) == null) {
                     validMaterialisation = false;
                     Utility.logInfo(name + " store FAILED for multi-stage materialisation in " + t.duration() + " seconds.");
                     return 0;
                 }
+
+                addOversizedSkolemisedFacts(getOversizedSkolemisedFacts(satisfiabilityFacts, maxTermDepth));
+
                 Utility.logDebug("Time to make the constraint being satisfied: " + localTimer.duration());
                 Utility.logDebug("Triples in the store: before=" + vOldCounter + ", after=" + store.getTriplesCount() + ", new=" + (store
                         .getTriplesCount() - vOldCounter));
@@ -212,6 +233,65 @@ public class MultiStageQueryEngine extends StageQueryEngine {
         return 0;
     }
 
+    private boolean cleanStoreFromOversizedSkolemisedFacts() {
+        if(oversizedSkolemisedFacts == null || oversizedSkolemisedFacts.isEmpty())
+            return false;
+
+        try {
+            for (Tuple<Integer> tuple : oversizedSkolemisedFacts) {
+                int[] triple = new int[]{tuple.get(0), tuple.get(1), tuple.get(2)};
+                store.addTriplesByResourceIDs(triple, DataStore.UpdateType.ScheduleForDeletion);
+            }
+        } catch (JRDFStoreException e) {
+            e.printStackTrace();
+            System.exit(1);
+        }
+        oversizedSkolemisedFacts = new HashSet<>();
+
+        return true;
+    }
+
+    private void addOversizedSkolemisedFacts(Set<Tuple<Integer>> facts) {
+        if(oversizedSkolemisedFacts == null)
+            oversizedSkolemisedFacts = new HashSet<>();
+        oversizedSkolemisedFacts.addAll(facts);
+    }
+
+    /**
+     * Gets the triples containing Skolem individuals whose depth is greater than or equal to the maximum.
+     *
+     * @param satisfiabilityFacts facts added by the treatment in order to satisfy a violation
+     * @param maxDepth            maximum admissible Skolem term depth
+     * @return the ID triples of the oversized facts
+     */
+    private Set<Tuple<Integer>> getOversizedSkolemisedFacts(Set<Treatment.AtomWithIDTriple> satisfiabilityFacts, int maxDepth) {
+        HashSet<Tuple<Integer>> result = new HashSet<>();
+        SkolemTermsManager termsManager = SkolemTermsManager.getInstance();
+        for (Treatment.AtomWithIDTriple atomWithIDTriple : satisfiabilityFacts) {
+            Atom atom = atomWithIDTriple.getAtom();
+            if(atom.getArity() == 1) {
+                if(atom.getArgument(0) instanceof Individual && termsManager.getDepthOf((Individual) atom.getArgument(0)) >= maxDepth) {
+                    int[] idTriple = atomWithIDTriple.getIDTriple();
+                    result.add(new TupleBuilder<Integer>().append(idTriple[0]).append(idTriple[1])
+                                                          .append(idTriple[2]).build());
+                }
+                else if(!(atom.getArgument(0) instanceof Individual))
+                    throw new IllegalArgumentException("No individuals: " + atom);
+            }
+            else {
+                if((atom.getArgument(0) instanceof Individual && termsManager.getDepthOf((Individual) atom.getArgument(0)) >= maxDepth)
+                        || (atom.getArgument(1) instanceof Individual && termsManager.getDepthOf((Individual) atom.getArgument(1)) >= maxDepth)) {
+                    int[] idTriple = atomWithIDTriple.getIDTriple();
+                    result.add(new TupleBuilder<Integer>().append(idTriple[0]).append(idTriple[1])
+                                                          .append(idTriple[2]).build());
+                }
+                else if(!(atom.getArgument(0) instanceof Individual) && !(atom.getArgument(1) instanceof Individual))
+                    throw new IllegalArgumentException("No individuals: " + atom);
+            }
+
+        }
+        return result;
+    }
+
     private void updateStatistics(String key, List<DLClause> value) {
         if(!statistics.containsKey(key))
             statistics.put(key, new ArrayList<List>());
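
In short: whenever a treatment adds facts to repair a violation, the facts whose Skolem individuals
have reached maxTermDepth are remembered as ID triples; the next materialise call first schedules
them for deletion and then forces reasoning to be re-applied. A condensed sketch of that cleanup
step (not part of the diff; it mirrors cleanStoreFromOversizedSkolemisedFacts above):

    // Schedule the remembered oversized facts for deletion, then re-run reasoning.
    for (Tuple<Integer> fact : oversizedSkolemisedFacts) {
        int[] triple = {fact.get(0), fact.get(1), fact.get(2)};
        store.addTriplesByResourceIDs(triple, DataStore.UpdateType.ScheduleForDeletion);
    }
    store.applyReasoning(true);   // in the actual code: incrementally || actuallyCleaned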
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java
index 3528788..f88a4d7 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java
@@ -29,10 +29,14 @@ public abstract class Pick4NegativeConcept extends Treatment {
     PredicateDependency dependencyGraph;
     boolean addGap = false;
 
-    public Pick4NegativeConcept(MultiStageQueryEngine store, MultiStageUpperProgram multiProgram) {
+    public Pick4NegativeConcept(MultiStageQueryEngine store, MultiStageUpperProgram multiProgram, RDFoxTripleManager tripleManager) {
         this.engine = store;
         this.program = multiProgram;
-        this.tripleManager = new RDFoxTripleManager(store.getDataStore(), true);
+        this.tripleManager = tripleManager;
+    }
+
+    public Pick4NegativeConcept(MultiStageQueryEngine store, MultiStageUpperProgram multiProgram) {
+        this(store, multiProgram, new RDFoxTripleManager(store.getDataStore(), true));
     }
 
     @Override
@@ -41,16 +45,23 @@ public abstract class Pick4NegativeConcept extends Treatment {
         addGap = true;
     }
 
-    void addTripleByID(Atom atom, Atom gapAtom, Map<Variable, Integer> assignment) {
+    Set<AtomWithIDTriple> addTripleByID(Atom atom, Atom gapAtom, Map<Variable, Integer> assignment) {
         if(isDisposed()) throw new DisposedException();
+        HashSet<AtomWithIDTriple> result = new HashSet<>();
         int[] newTuple = tripleManager.getInstance(atom, assignment);
+        result.add(new AtomWithIDTriple(atom, newTuple));
         tripleManager.addTripleByID(newTuple);
-        if(addGap)
-            tripleManager.addTripleByID(tripleManager.getInstance(gapAtom, assignment));
+        if(addGap) {
+            int[] instance = tripleManager.getInstance(gapAtom, assignment);
+            tripleManager.addTripleByID(instance);
+            result.add(new AtomWithIDTriple(gapAtom, instance));
+        }
+        return result;
     }
 
     // TODO -RULE-
-    protected boolean makeSatisfied(Violation violation, Comparator<Atom> comp) {
+    protected Set<AtomWithIDTriple> makeSatisfied(Violation violation, Comparator<Atom> comp) {
+        HashSet<AtomWithIDTriple> result = new HashSet<>();
         LinkedList<AnswerTupleID> tuples = violation.getTuples();
         DLClause constraint = violation.getConstraint();
         Map<Variable, Integer> assignment = new HashMap<Variable, Integer>();
@@ -97,7 +108,7 @@ public abstract class Pick4NegativeConcept extends Treatment {
                 if(lastAdded == null || tComp.compare(lastAdded, tuple) != 0) {
                     lastAdded = tuple;
                     tuple.getAssignment(violation.getVariables(), assignment);
-                    addTripleByID(headAtom, gapHeadAtom, assignment);
+                    result.addAll(addTripleByID(headAtom, gapHeadAtom, assignment));
                 }
                 iter.remove();
             }
@@ -105,9 +116,9 @@ public abstract class Pick4NegativeConcept extends Treatment {
 //                tuples.reset();
 
                 if(tuples.isEmpty())
-                    return true;
+                    return result;
             }
-            if(!tuples.isEmpty()) return false;
+            if(!tuples.isEmpty()) return null;
         }
         else {
             Set<Atom> headAtoms = new HashSet<Atom>();
@@ -136,7 +147,7 @@ public abstract class Pick4NegativeConcept extends Treatment {
                 if(DLClauseHelper.isGround(tHeadAtom)) {
                     if(!addedGroundAtoms.contains(tHeadAtom)) {
                         program.addUpdatedPredicate(tHeadAtom.getDLPredicate());
-                        addTripleByID(tHeadAtom, tGapHeadAtom, null);
+                        result.addAll(addTripleByID(tHeadAtom, tGapHeadAtom, null));
                         addedGroundAtoms.add(tHeadAtom);
                     }
                 }
@@ -149,13 +160,14 @@ public abstract class Pick4NegativeConcept extends Treatment {
             for(AnswerTupleID tuple : tuples) {
                 tuple.getAssignment(violation.getVariables(), assignment);
                 for(Atom atom : headAtoms) {
-                    addTripleByID(atom, getGapAtom(atom), assignment);
+                    Atom gapAtom = getGapAtom(atom);
+                    result.addAll(addTripleByID(atom, gapAtom, assignment));
                 }
             }
         }
 
         assignment.clear();
-        return true;
+        return result;
     }
 
     private Atom getGapAtom(Atom atom) {
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptNaive.java b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptNaive.java
index af190fc..2fc2683 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptNaive.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptNaive.java
@@ -5,19 +5,27 @@ import uk.ac.ox.cs.pagoda.constraints.PredicateDependency;
 import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
 import uk.ac.ox.cs.pagoda.multistage.MultiStageUpperProgram;
 import uk.ac.ox.cs.pagoda.multistage.Violation;
+import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager;
 import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
 
+import java.util.Set;
+
 public class Pick4NegativeConceptNaive extends Pick4NegativeConcept {
 
     SimpleComparator comp = new SimpleComparator();
 
     public Pick4NegativeConceptNaive(MultiStageQueryEngine store, MultiStageUpperProgram multiProgram) {
         super(store, multiProgram);
         dependencyGraph = new PredicateDependency(multiProgram.getClauses());
     }
+
+    public Pick4NegativeConceptNaive(MultiStageQueryEngine store, MultiStageUpperProgram multiProgram, RDFoxTripleManager rdFoxTripleManager) {
+        super(store, multiProgram, rdFoxTripleManager);
+        dependencyGraph = new PredicateDependency(multiProgram.getClauses());
+    }
 
     @Override
-    public boolean makeSatisfied(Violation violation) throws JRDFStoreException {
+    public Set<AtomWithIDTriple> makeSatisfied(Violation violation) throws JRDFStoreException {
         if(isDisposed()) throw new DisposedException();
         return makeSatisfied(violation, comp);
     }
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptQuerySpecific.java b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptQuerySpecific.java
index 20b4376..675bfc3 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptQuerySpecific.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptQuerySpecific.java
@@ -31,7 +31,7 @@ public class Pick4NegativeConceptQuerySpecific extends Pick4NegativeConcept {
     }
 
     @Override
-    public boolean makeSatisfied(Violation violation) throws JRDFStoreException {
+    public Set<AtomWithIDTriple> makeSatisfied(Violation violation) throws JRDFStoreException {
         if(isDisposed()) throw new DisposedException();
         return makeSatisfied(violation, comp);
     }
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Treatment.java b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Treatment.java
index cb81af0..de76d27 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Treatment.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Treatment.java
@@ -1,12 +1,35 @@
 package uk.ac.ox.cs.pagoda.multistage.treatement;
 
+import org.semanticweb.HermiT.model.Atom;
 import uk.ac.ox.cs.JRDFox.JRDFStoreException;
 import uk.ac.ox.cs.pagoda.multistage.Violation;
 import uk.ac.ox.cs.pagoda.util.disposable.Disposable;
 
+import java.util.Set;
+
 public abstract class Treatment extends Disposable {
 
-    public abstract boolean makeSatisfied(Violation violation) throws JRDFStoreException;
+    public abstract Set<AtomWithIDTriple> makeSatisfied(Violation violation) throws JRDFStoreException;
 
     public abstract void addAdditionalGapTuples();
+
+    public class AtomWithIDTriple {
+
+        private Atom atom;
+        private int[] IDTriple;
+
+        public AtomWithIDTriple(Atom atom, int[] IDTriple) {
+            this.atom = atom;
+            this.IDTriple = IDTriple;
+        }
+
+        public Atom getAtom() {
+            return atom;
+        }
+
+        public int[] getIDTriple() {
+            return IDTriple;
+        }
+
+    }
 }
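
With this change, makeSatisfied no longer answers true/false: it returns the set of
AtomWithIDTriple facts it added to repair the violation, or null if the violation could not be
repaired. Callers test for null and keep the returned facts, as MultiStageQueryEngine now does;
a minimal caller sketch under that contract (names taken from that class):

    // null signals failure; a (possibly empty) set signals success.
    Set<Treatment.AtomWithIDTriple> satisfiabilityFacts = treatment.makeSatisfied(violation);
    if (satisfiabilityFacts == null) {
        // the multi-stage materialisation failed for this violation
    } else {
        // remember which of the added facts contain oversized Skolem terms
        addOversizedSkolemisedFacts(getOversizedSkolemisedFacts(satisfiabilityFacts, maxTermDepth));
    }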
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
index 71d5752..a393474 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
@@ -396,24 +396,31 @@ class MyQueryReasoner extends QueryReasoner {
         relevantStore.importDataFromABoxOf(relevantSubset);
         String relevantOriginalMarkProgram = OWLHelper.getOriginalMarkProgram(relevantSubset);
 
-        int queryDependentMaxTermDepth = properties.getSkolemDepth();
         relevantStore.materialise("Mark original individuals", relevantOriginalMarkProgram);
-        int materialisationTag = relevantStore.materialiseSkolemly(relevantProgram, null,
-                                                                   queryDependentMaxTermDepth);
-        queryRecord.addProcessingTime(Step.SKOLEM_UPPER_BOUND, t.duration());
-        if(materialisationTag == -1) {
-            throw new Error("A consistent ontology has turned out to be " +
-                                    "inconsistent in the Skolemises-relevant-upper-store");
-        }
-        else if(materialisationTag != 1) {
-            Utility.logInfo("Semi-Skolemised relevant upper store cannot be employed");
-            return false;
-        }
 
-        Utility.logInfo("Querying semi-Skolemised upper store...");
-        boolean isFullyProcessed = queryUpperStore(relevantStore, queryRecord,
-                                                   queryRecord.getExtendedQueryText(),
-                                                   Step.SKOLEM_UPPER_BOUND);
+        boolean isFullyProcessed = false;
+        for (int currentMaxTermDepth = 1;
+             currentMaxTermDepth <= properties.getSkolemDepth() && !isFullyProcessed; currentMaxTermDepth++) {
+
+            Utility.logInfo("Trying with maximum depth " + currentMaxTermDepth);
+
+            int materialisationTag = relevantStore.materialiseSkolemly(relevantProgram, null,
+                                                                       currentMaxTermDepth);
+            queryRecord.addProcessingTime(Step.SKOLEM_UPPER_BOUND, t.duration());
+            if(materialisationTag == -1) {
+                throw new Error("A consistent ontology has turned out to be " +
+                                        "inconsistent in the Skolemises-relevant-upper-store");
+            }
+            else if(materialisationTag != 1) {
+                Utility.logInfo("Semi-Skolemised relevant upper store cannot be employed");
+                return false;
+            }
+
+            Utility.logInfo("Querying semi-Skolemised upper store...");
+            isFullyProcessed = queryUpperStore(relevantStore, queryRecord,
+                                               queryRecord.getExtendedQueryText(),
+                                               Step.SKOLEM_UPPER_BOUND);
+        }
 
         relevantStore.dispose();
         Utility.logInfo("Semi-Skolemised relevant upper store has been evaluated");
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
index 85f8ef9..62885be 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
@@ -93,6 +93,14 @@ public class RDFoxTripleManager {
             e.printStackTrace();
         }
     }
+
+    public void removeTripleByTermIncrementally(Atom atom) {
+        try {
+            m_store.addTriples(getRDFoxTriple(atom), UpdateType.ScheduleForDeletion);
+        } catch (JRDFStoreException e) {
+            e.printStackTrace();
+        }
+    }
 
     public static GroundTerm[] getRDFoxTriple(Atom instance) {
         if (instance.getArity() == 1)
@@ -258,5 +266,4 @@ public class RDFoxTripleManager {
         else
             return "\"" + r.m_lexicalForm + "\"^^<" + r.m_datatype.getIRI() + ">";
     }
-
 }
diff --git a/src/uk/ac/ox/cs/pagoda/util/ExponentialInterpolation.java b/src/uk/ac/ox/cs/pagoda/util/ExponentialInterpolation.java
new file mode 100644
index 0000000..1d12169
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/util/ExponentialInterpolation.java
@@ -0,0 +1,33 @@
+package uk.ac.ox.cs.pagoda.util;
+
+/***
+ * An exponential function determined by two points.
+ */
+public class ExponentialInterpolation {
+
+    private final double base;
+    private final double multiplicativeFactor;
+
+    /***
+     * Computes the exponential function passing through the two given points.
+     *
+     * @param x1 x-coordinate of the first point
+     * @param y1 y-coordinate of the first point
+     * @param x2 x-coordinate of the second point
+     * @param y2 y-coordinate of the second point
+     */
+    public ExponentialInterpolation(double x1, double y1, double x2, double y2) {
+        base = Math.pow(y2 / y1, 1 / (x2 - x1));
+        multiplicativeFactor = y1 / Math.pow(base, x1);
+    }
+
+    /***
+     * Computes the value of the function at x.
+     *
+     * @param x the point at which the function is evaluated
+     * @return the value of the function at x
+     */
+    public double computeValue(double x) {
+        return multiplicativeFactor * Math.pow(base, x);
+    }
+}
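
The class fits f(x) = c * b^x through (x1, y1) and (x2, y2), with b = (y2/y1)^(1/(x2-x1)) and
c = y1 / b^x1. A small usage sketch (not part of the commit; the numbers are only an example):

    // Through (1, 2) and (5, 32): base = (32/2)^(1/4) = 2, factor = 2 / 2^1 = 1.
    ExponentialInterpolation f = new ExponentialInterpolation(1, 2, 5, 32);
    double y = f.computeValue(3);   // 1 * 2^3 = 8.0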
diff --git a/src/uk/ac/ox/cs/pagoda/util/tuples/TupleBuilder.java b/src/uk/ac/ox/cs/pagoda/util/tuples/TupleBuilder.java
index ee2b74d..172e249 100644
--- a/src/uk/ac/ox/cs/pagoda/util/tuples/TupleBuilder.java
+++ b/src/uk/ac/ox/cs/pagoda/util/tuples/TupleBuilder.java
@@ -1,18 +1,31 @@
 package uk.ac.ox.cs.pagoda.util.tuples;
 
+import java.util.Collections;
+
 /**
  * Allows to create an immutable <tt>Tuple</tt> in a non-atomic way.
  * It can create only one <tt>Tuple</tt>.
  * */
 public class TupleBuilder<T> {
 
-    private Tuple tuple = new Tuple();
+    private Tuple<T> tuple = new Tuple<T>();
 
     private boolean building = true;
 
-    public boolean append(T t) {
-        if(building) tuple.elements.add(t);
-        return building;
+    public TupleBuilder<T> append(T t) {
+        if(building) {
+            tuple.elements.add(t);
+            return this;
+        }
+        return null;
+    }
+
+    public TupleBuilder<T> append(T[] t) {
+        if(building) {
+            Collections.addAll(tuple.elements, t);
+            return this;
+        }
+        return null;
     }
 
     public Tuple<T> build() {
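
With append now returning the builder itself (or null once building has finished), tuples can be
assembled fluently, which is how the new MultiStageQueryEngine code uses it. A brief sketch (the
ID variables are illustrative):

    // Build an immutable three-element tuple of resource IDs in one chained expression.
    Tuple<Integer> idTriple = new TupleBuilder<Integer>()
            .append(subjectId).append(predicateId).append(objectId)
            .build();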