author    Federico Igne <federico.igne@cs.ox.ac.uk>  2022-05-10 18:17:06 +0100
committer Federico Igne <federico.igne@cs.ox.ac.uk>  2022-05-11 12:34:47 +0100
commit    17bd9beaf7f358a44e5bf36a5855fe6727d506dc (patch)
tree      47e9310a0cff869d9ec017dcb2c81876407782c8 /src/main/java/uk/ac/ox/cs/pagoda/reasoner
parent    8651164cd632a5db310b457ce32d4fbc97bdc41c (diff)
download  ACQuA-17bd9beaf7f358a44e5bf36a5855fe6727d506dc.tar.gz
          ACQuA-17bd9beaf7f358a44e5bf36a5855fe6727d506dc.zip
[pagoda] Move project to Scala
This commit includes a few changes:

- The repository still uses Maven to manage dependencies, but it is now a Scala project.
- The code has been ported from OWLAPI 3.4.10 to 5.1.20.
- A proof-of-concept program using both RSAComb and PAGOdA has been added.
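A minimal sketch of driving the PAGOdA side after this change (the file paths are placeholders, and the PagodaProperties setters are assumed to mirror the getters used in QueryReasoner.getInstance below; the RSAComb proof-of-concept program is not part of this directory):

    import uk.ac.ox.cs.pagoda.query.AnswerTuples;
    import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner;
    import uk.ac.ox.cs.pagoda.util.PagodaProperties;

    public class PagodaExample {
        public static void main(String[] args) {
            PagodaProperties props = new PagodaProperties();
            props.setOntologyPath("ontology.owl"); // placeholder paths (assumed setters)
            props.setDataPath("data.ttl");

            // Loads the ontology and data, runs preprocessing, and returns
            // null if the ontology and data are inconsistent.
            QueryReasoner pagoda = QueryReasoner.getInstance(props);
            if(pagoda == null) return;

            AnswerTuples answers =
                    pagoda.evaluate("SELECT ?X WHERE { ?X a <http://example.org/A> }");
            for(; answers.isValid(); answers.moveNext())
                System.out.println(answers.getTuple());
            answers.dispose();
            pagoda.dispose();
        }
    }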
Diffstat (limited to 'src/main/java/uk/ac/ox/cs/pagoda/reasoner')
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java          | 314
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java           | 101
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java          | 199
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java              | 122
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java         | 117
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java             | 473
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java                 |  15
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java               | 266
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java             |  86
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java            | 135
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java                |  17
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/full/HermitChecker.java          | 282
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java      | 422
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java |  24
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java            |  95
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java      | 109
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java     | 117
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java      | 139
-rw-r--r--  src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java    | 269
19 files changed, 3302 insertions(+), 0 deletions(-)
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java
new file mode 100644
index 0000000..3fd2fbd
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java
@@ -0,0 +1,314 @@
package uk.ac.ox.cs.pagoda.reasoner;

import org.semanticweb.HermiT.model.Atom;
import org.semanticweb.HermiT.model.AtomicConcept;
import org.semanticweb.HermiT.model.DLClause;
import org.semanticweb.HermiT.model.Variable;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import uk.ac.ox.cs.JRDFox.JRDFStoreException;
import uk.ac.ox.cs.JRDFox.store.DataStore;
import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
import uk.ac.ox.cs.pagoda.query.AnswerTuples;
import uk.ac.ox.cs.pagoda.query.QueryManager;
import uk.ac.ox.cs.pagoda.query.QueryRecord;
import uk.ac.ox.cs.pagoda.reasoner.full.Checker;
import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram;
import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter;
import uk.ac.ox.cs.pagoda.tracking.QueryTracker;
import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder;
import uk.ac.ox.cs.pagoda.util.Timer;
import uk.ac.ox.cs.pagoda.util.Utility;
import uk.ac.ox.cs.pagoda.util.disposable.Disposable;
import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;

import java.util.LinkedList;

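/*
 * Checks the consistency of the loaded ontology and data incrementally:
 * first by looking for answers to the "bottom" query (instances of
 * owl:Nothing) in the sound RL and ELHO lower-bound stores (any answer
 * proves inconsistency), then in the complete upper-bound stores (no
 * answer proves consistency); only for the remaining gap are per-bottom
 * fragments extracted and checked with a full reasoner (HermiT).
 */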
public class ConsistencyManager extends Disposable {

    protected MyQueryReasoner m_reasoner;
    protected QueryManager m_queryManager;

    Timer t = new Timer();
    QueryRecord fullQueryRecord;
    QueryRecord[] botQueryRecords;
    LinkedList<DLClause> toAddClauses;
    boolean fragmentExtracted = false;

    public ConsistencyManager(MyQueryReasoner reasoner) {
        m_reasoner = reasoner;
        m_queryManager = reasoner.getQueryManager();
    }

    @Override
    public void dispose() {
        super.dispose();
        fullQueryRecord.dispose();
    }

    public void extractBottomFragment() {
        if(isDisposed()) throw new DisposedException();

        if(fragmentExtracted) return;
        fragmentExtracted = true;

        UpperDatalogProgram upperProgram = m_reasoner.program.getUpper();
        int number = upperProgram.getBottomNumber();

        if(number <= 1) {
            botQueryRecords = new QueryRecord[]{fullQueryRecord};
        }
        else {
            QueryRecord[] tempQueryRecords = new QueryRecord[number - 1];
            QueryRecord record;
            for(int i = 0; i < number - 1; ++i) {
                tempQueryRecords[i] = record =
                        m_queryManager.create(QueryRecord.botQueryText.replace("Nothing", "Nothing" + (i + 1)), 0, i + 1);
                AnswerTuples iter = null;
                try {
                    iter = m_reasoner.trackingStore.evaluate(record.getQueryText(), record.getAnswerVariables());
                    record.updateUpperBoundAnswers(iter);
                } finally {
                    if(iter != null) iter.dispose();
                    iter = null;
                }
            }

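            // Group the per-bottom queries by their gap answers: group[j] = i
            // records that query j produces the same gap answers as the earlier
            // query i, so both can be tracked by a single representative;
            // bottomNumber counts the distinct representatives that remain.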
            int bottomNumber = 0;
            int[] group = new int[number - 1];
            for(int i = 0; i < number - 1; ++i) group[i] = i;
            for(int i = 0; i < number - 1; ++i)
                if(tempQueryRecords[i].isProcessed()) tempQueryRecords[i].dispose();
                else if(group[i] == i) {
                    ++bottomNumber;
                    record = tempQueryRecords[i];
                    for(int j = i + 1; j < number - 1; ++j)
                        if(record.hasSameGapAnswers(tempQueryRecords[j]))
                            group[j] = i;
                }

            Utility.logInfo("There are " + bottomNumber + " different bottom fragments.");
            toAddClauses = new LinkedList<DLClause>();
            int bottomCounter = 0;
            botQueryRecords = new QueryRecord[bottomNumber];
            Variable X = Variable.create("X");
            for(int i = 0; i < number - 1; ++i)
                if(!tempQueryRecords[i].isDisposed() && !tempQueryRecords[i].isProcessed())
                    if(group[i] == i) {
                        botQueryRecords[bottomCounter] = record = tempQueryRecords[i];
                        record.resetInfo(QueryRecord.botQueryText.replace("Nothing", "Nothing_final" + (++bottomCounter)), 0,
                                group[i] = bottomCounter);
                        toAddClauses.add(
                                DLClause.create(
                                        new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + bottomCounter), X)},
                                        new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)}));
                    }
                    else {
                        toAddClauses.add(
                                DLClause.create(
                                        new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + "_final" + group[group[i]]), X)},
                                        new Atom[]{Atom.create(AtomicConcept.create(AtomicConcept.NOTHING.getIRI() + (i + 1)), X)}));
                        tempQueryRecords[i].dispose();
                    }

            upperProgram.updateDependencyGraph(toAddClauses);
        }

        String[] programs = collectTrackingProgramAndImport();
        if(programs.length == 0)
            return;

        DataStore store = m_reasoner.trackingStore.getDataStore();
        long oldTripleCount, tripleCount;
        try {
            Timer t1 = new Timer();
            oldTripleCount = store.getTriplesCount();
            for(String program : programs)
                store.importRules(program, UpdateType.ScheduleForAddition);
            store.applyReasoning(true);
            tripleCount = store.getTriplesCount();

            Utility.logDebug("tracking store after materialising tracking program: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)",
                    "tracking store finished the materialisation of tracking program in " + t1.duration() + " seconds.");

            extractAxioms();
            store.clearRulesAndMakeFactsExplicit();
        } catch(JRDFStoreException e) {
            e.printStackTrace();
        } catch(OWLOntologyCreationException e) {
            e.printStackTrace();
        }
    }

    public QueryRecord[] getQueryRecords() {
        if(isDisposed()) throw new DisposedException();

        return botQueryRecords;
    }

    boolean checkRLLowerBound() {
        if(isDisposed()) throw new DisposedException();

        fullQueryRecord = m_queryManager.create(QueryRecord.botQueryText, 0);
        AnswerTuples iter = null;

        try {
            iter = m_reasoner.rlLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables());
            fullQueryRecord.updateLowerBoundAnswers(iter);
        } finally {
            // guard against a failed evaluate() leaving iter null
            if(iter != null) iter.dispose();
        }

        if (fullQueryRecord.getNoOfSoundAnswers() > 0) {
            Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple());
            return false;
        }
        return true;
    }

//    protected boolean unsatisfiability(double duration) {
//        fullQueryRecord.dispose();
//        Utility.logDebug("The ontology and dataset is unsatisfiable.");
//        return false;
//    }

//    protected boolean satisfiability(double duration) {
//        fullQueryRecord.dispose();
//        Utility.logDebug("The ontology and dataset is satisfiable.");
//        return true;
//    }

    boolean checkELLowerBound() {
        if(isDisposed()) throw new DisposedException();

        fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord
                .getAnswerVariables()));
        if(fullQueryRecord.getNoOfSoundAnswers() > 0) {
            Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple());
            // sound answers to the bottom query witness an inconsistency
            return false;
        }
        return true;
    }

    boolean checkUpper(BasicQueryEngine upperStore) {
        if(isDisposed()) throw new DisposedException();

        if(upperStore != null) {
            AnswerTuples tuples = null;
            try {
                tuples = upperStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables());
                if(!tuples.isValid()) {
                    Utility.logInfo("There are no contradictions derived in " + upperStore.getName() + " materialisation.");
                    Utility.logDebug("The ontology and dataset are satisfiable.");
                    return true;
                }
            } finally {
                if(tuples != null) tuples.dispose();
            }
        }
        return false;
    }

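    /*
     * Full consistency check: first query the bottom predicate in the complete
     * tracking store (an empty upper bound proves consistency); otherwise fall
     * back to checking each extracted bottom fragment with the summarisation-
     * based HermiT checker, which decides the remaining candidates exactly.
     */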
    boolean check() {
        if(isDisposed()) throw new DisposedException();

//        if (!checkRLLowerBound()) return false;
//        if (!checkELLowerBound()) return false;
//        if (checkLazyUpper()) return true;
        AnswerTuples iter = null;

        try {
            iter =
                    m_reasoner.trackingStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables());
            fullQueryRecord.updateUpperBoundAnswers(iter);
        } finally {
            if(iter != null) iter.dispose();
        }

        if(fullQueryRecord.getNoOfCompleteAnswers() == 0)
            return true;

        extractBottomFragment();

        try {
            extractAxioms4Full();
        } catch(OWLOntologyCreationException e) {
            e.printStackTrace();
        }
//        fullQueryRecord.saveRelevantClause();

        boolean satisfiability;

        Checker checker;
        for(QueryRecord r : getQueryRecords()) {
            // TODO to be removed ...
//            r.saveRelevantOntology("bottom" + r.getQueryID() + ".owl");
            checker = new HermitSummaryFilter(r, true); // m_reasoner.factory.getSummarisedReasoner(r);
            satisfiability = checker.isConsistent();
            checker.dispose();
            if(!satisfiability) return false;
        }

//        Checker checker = m_reasoner.factory.getSummarisedReasoner(fullQueryRecord);
//        boolean satisfiable = checker.isConsistent();
//        checker.dispose();
//        if (!satisfiable) return unsatisfiability(t.duration());

        return true;
    }

    private void extractAxioms4Full() throws OWLOntologyCreationException {
        OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager();
        OWLOntology fullOntology = manager.createOntology();
        for (QueryRecord record: botQueryRecords) {
            for (DLClause clause: record.getRelevantClauses()) {
                fullQueryRecord.addRelevantClauses(clause);
            }
            manager.addAxioms(fullOntology, record.getRelevantOntology().getAxioms());
        }
        fullQueryRecord.setRelevantOntology(fullOntology);
    }

    private void extractAxioms() throws OWLOntologyCreationException {
        OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager();
        for (QueryRecord record: botQueryRecords) {
            record.setRelevantOntology(manager.createOntology());
            QueryTracker tracker = new QueryTracker(m_reasoner.encoder, m_reasoner.rlLowerStore, record);
            m_reasoner.encoder.setCurrentQuery(record);
            tracker.extractAxioms(m_reasoner.trackingStore);
//            record.saveRelevantClause();
//            record.saveRelevantOntology("bottom" + record.getQueryID() + ".owl");
            Utility.logInfo("finish extracting axioms for bottom " + record.getQueryID());
        }
    }

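    /*
     * Builds one tracking program per bottom fragment: the encoder's tracking
     * rules for that fragment plus the bridging clauses (toAddClauses) whose
     * head matches the fragment's Nothing_final predicate. The resulting
     * programs are imported into the tracking store by extractBottomFragment().
     */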
    private String[] collectTrackingProgramAndImport() {
        String[] programs = new String[botQueryRecords.length];
        TrackingRuleEncoder encoder = m_reasoner.encoder;

        StringBuilder builder;
        LinkedList<DLClause> currentClauses = new LinkedList<DLClause>();

        for (int i = 0; i < botQueryRecords.length; ++i) {
            encoder.setCurrentQuery(botQueryRecords[i]);
            builder = new StringBuilder(encoder.getTrackingProgram());
//            encoder.saveTrackingRules("tracking_bottom" + (i + 1) + ".dlog");

            for (DLClause clause: toAddClauses)
                if (clause.getHeadAtom(0).getDLPredicate().toString().contains("_final" + (i + 1)))
                    currentClauses.add(clause);

            builder.append(DLClauseHelper.toString(currentClauses));
            programs[i] = builder.toString();

            currentClauses.clear();
        }

        return programs;
    }


}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java
new file mode 100644
index 0000000..29754ce
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java
@@ -0,0 +1,101 @@
package uk.ac.ox.cs.pagoda.reasoner;

import org.semanticweb.karma2.profile.ELHOProfile;
import org.semanticweb.owlapi.model.OWLOntology;
import uk.ac.ox.cs.pagoda.constraints.UnaryBottom;
import uk.ac.ox.cs.pagoda.query.AnswerTuples;
import uk.ac.ox.cs.pagoda.query.QueryRecord;
import uk.ac.ox.cs.pagoda.query.QueryRecord.Step;
import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine;
import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram;
import uk.ac.ox.cs.pagoda.util.Timer;
import uk.ac.ox.cs.pagoda.util.Utility;
import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;

public class ELHOQueryReasoner extends QueryReasoner {

    LowerDatalogProgram program;

    OWLOntology elho_ontology;
    KarmaQueryEngine elLowerStore = null;

    private Timer t = new Timer();

    public ELHOQueryReasoner() {
        elLowerStore = new KarmaQueryEngine("el");
    }

    @Override
    public void evaluate(QueryRecord queryRecord) {
        if(isDisposed()) throw new DisposedException();
        AnswerTuples elAnswer = null;
        t.reset();
        try {
            elAnswer = elLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
            queryRecord.updateLowerBoundAnswers(elAnswer);
        } finally {
            if(elAnswer != null) elAnswer.dispose();
        }
        queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration());

        queryRecord.setDifficulty(Step.EL_LOWER_BOUND);
        queryRecord.markAsProcessed();
    }

    @Override
    public void evaluateUpper(QueryRecord queryRecord) {
        if(isDisposed()) throw new DisposedException();
        evaluate(queryRecord);
    }

    @Override
    public void dispose() {
        super.dispose();
        if(elLowerStore != null) elLowerStore.dispose();
    }

    @Override
    public void loadOntology(OWLOntology ontology) {
        if(isDisposed()) throw new DisposedException();
        program = new LowerDatalogProgram(properties.getToClassify());
        program.load(ontology, new UnaryBottom());
        program.transform();

        importData(program.getAdditionalDataFile());

        elho_ontology = new ELHOProfile().getFragment(ontology);
        elLowerStore.processOntology(elho_ontology);
    }

    @Override
    public boolean preprocess() {
        if(isDisposed()) throw new DisposedException();
        elLowerStore.importRDFData("data", getImportedData());
        String rlLowerProgramText = program.toString();
//        program.save();
        elLowerStore.materialise("lower program", rlLowerProgramText);
        elLowerStore.initialiseKarma();

        if(!isConsistent()) {
            Utility.logDebug("The dataset is not consistent with the ontology.");
            return false;
        }
        return true;
    }

    @Override
    public boolean isConsistent() {
        if(isDisposed()) throw new DisposedException();
        String[] X = new String[]{"X"};
        AnswerTuples ans = null;
        try {
            ans = elLowerStore.evaluate(QueryRecord.botQueryText, X);
            if(ans.isValid()) return false;
        } finally {
            if(ans != null) ans.dispose();
        }

        return true;
    }

}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java
new file mode 100644
index 0000000..15dfa03
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java
@@ -0,0 +1,199 @@
package uk.ac.ox.cs.pagoda.reasoner;

import org.semanticweb.karma2.profile.ELHOProfile;
import org.semanticweb.owlapi.model.OWLOntology;
import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator;
import uk.ac.ox.cs.pagoda.owl.OWLHelper;
import uk.ac.ox.cs.pagoda.query.AnswerTuples;
import uk.ac.ox.cs.pagoda.query.QueryRecord;
import uk.ac.ox.cs.pagoda.query.QueryRecord.Step;
import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine;
import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
import uk.ac.ox.cs.pagoda.util.Timer;
import uk.ac.ox.cs.pagoda.util.Utility;
import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;

class ELHOUQueryReasoner extends QueryReasoner {

    DatalogProgram program;

    BasicQueryEngine rlLowerStore;
    BasicQueryEngine rlUpperStore;

    OWLOntology elho_ontology;
    KarmaQueryEngine elLowerStore = null;

    boolean multiStageTag, equalityTag;
    String originalMarkProgram;
    private Timer t = new Timer();

    public ELHOUQueryReasoner(boolean multiStageTag, boolean considerEqualities) {
        this.multiStageTag = multiStageTag;
        this.equalityTag = considerEqualities;
        rlLowerStore = new BasicQueryEngine("rl-lower-bound");
        elLowerStore = new KarmaQueryEngine("el-lower-bound");

        if(!multiStageTag)
            rlUpperStore = new BasicQueryEngine("rl-upper-bound");
        else
            rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false);
    }

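    /*
     * Query answering by bound tightening: compute a sound lower bound from
     * the RL store, a complete upper bound from the RL upper store (on the
     * original and, where different, the extended query), and, if the bounds
     * still disagree, tighten the lower bound with the ELHO (Karma) store.
     */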
    @Override
    public void evaluate(QueryRecord queryRecord) {
        if(isDisposed()) throw new DisposedException();
        AnswerTuples rlAnswer = null;
        t.reset();
        try {
            rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
            queryRecord.updateLowerBoundAnswers(rlAnswer);
        } finally {
            if(rlAnswer != null) rlAnswer.dispose();
        }
        queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration());

        String extendedQueryText = queryRecord.getExtendedQueryText().get(0);
        String[] toQuery = queryRecord.getQueryText().equals(extendedQueryText) ?
                new String[]{queryRecord.getQueryText()} :
                new String[]{queryRecord.getQueryText(), extendedQueryText};

        for(String queryText : toQuery) {
            rlAnswer = null;
            t.reset();
            try {
                rlAnswer = rlUpperStore.evaluate(queryText, queryRecord.getAnswerVariables());
                queryRecord.updateUpperBoundAnswers(rlAnswer);
            } finally {
                if(rlAnswer != null) rlAnswer.dispose();
            }
            queryRecord.addProcessingTime(Step.UPPER_BOUND, t.duration());

            if(queryRecord.isProcessed()) {
                queryRecord.setDifficulty(Step.UPPER_BOUND);
                return;
            }
        }

        AnswerTuples elAnswer = null;
        t.reset();
        try {
            elAnswer =
                    elLowerStore.evaluate(extendedQueryText, queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers());
            queryRecord.updateLowerBoundAnswers(elAnswer);
        } finally {
            if(elAnswer != null) elAnswer.dispose();
        }
        queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration());
    }

    @Override
    public void evaluateUpper(QueryRecord queryRecord) {
        if(isDisposed()) throw new DisposedException();
        AnswerTuples rlAnswer = null;
        try {
            rlAnswer = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
            queryRecord.updateUpperBoundAnswers(rlAnswer, true);
        } finally {
            if(rlAnswer != null) rlAnswer.dispose();
        }
    }

    @Override
    public void dispose() {
        super.dispose();
        if(elLowerStore != null) elLowerStore.dispose();
        if(rlUpperStore != null) rlUpperStore.dispose();
    }

    @Override
    public void loadOntology(OWLOntology o) {
        if(isDisposed()) throw new DisposedException();
        if(!equalityTag) {
            EqualitiesEliminator eliminator = new EqualitiesEliminator(o);
            o = eliminator.getOutputOntology();
            eliminator.save();
        }

        OWLOntology ontology = o;
        program = new DatalogProgram(ontology);

        importData(program.getAdditionalDataFile());

        elho_ontology = new ELHOProfile().getFragment(ontology);
        elLowerStore.processOntology(elho_ontology);
        originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology);
    }

    @Override
    public boolean preprocess() {
        if(isDisposed()) throw new DisposedException();
        String name = "data", datafile = getImportedData();

        String lowername = "lower program";
        String rlLowerProgramText = program.getLower().toString();

        rlUpperStore.importRDFData(name, datafile);
        rlUpperStore.materialise("saturate named individuals", originalMarkProgram);

        int flag = rlUpperStore.materialiseRestrictedly(program, null);
        if(flag != 1) {
            if(flag == -1) return false;
            rlUpperStore.dispose();

            if(!multiStageTag)
                rlUpperStore = new BasicQueryEngine("rl-upper-bound");
            else
                rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false);
            rlUpperStore.importRDFData(name, datafile);
            rlUpperStore.materialise("saturate named individuals", originalMarkProgram);
            rlUpperStore.materialiseFoldedly(program, null);
        }
        Utility.logInfo("upper store ready.");

        rlLowerStore.importRDFData(name, datafile);
        rlLowerStore.materialise(lowername, rlLowerProgramText);
        Utility.logInfo("lower store ready.");

        elLowerStore.importRDFData(name, datafile);
        elLowerStore.materialise("saturate named individuals", originalMarkProgram);
        elLowerStore.materialise(lowername, rlLowerProgramText);

        elLowerStore.initialiseKarma();
        Utility.logInfo("EL lower store ready.");

        if(!isConsistent()) {
            Utility.logInfo("The dataset is not consistent with the ontology.");
            return false;
        }
        Utility.logInfo("The dataset is consistent.");
        return true;
    }

    @Override
    public boolean isConsistent() {
        if(isDisposed()) throw new DisposedException();
        Utility.logInfo("Start checking consistency... ");
        String[] X = new String[]{"X"};
        AnswerTuples ans = null;
        try {
            ans = rlUpperStore.evaluate(QueryRecord.botQueryText, X);
            if(!ans.isValid()) return true;
        } finally {
            if(ans != null) ans.dispose();
        }

        ans = null;
        try {
            ans = elLowerStore.evaluate(QueryRecord.botQueryText, X);
            if(ans.isValid()) return false;
        } finally {
            if(ans != null) ans.dispose();
        }

        Utility.logDebug("The consistency of the data has not been determined yet.");
        return true;
    }

}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java
new file mode 100644
index 0000000..ac62488
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java
@@ -0,0 +1,122 @@
package uk.ac.ox.cs.pagoda.reasoner;

import org.semanticweb.HermiT.Reasoner;
import org.semanticweb.owlapi.model.*;
import uk.ac.ox.cs.JRDFox.model.Individual;
import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
import uk.ac.ox.cs.pagoda.owl.OWLHelper;
import uk.ac.ox.cs.pagoda.owl.QueryRoller;
import uk.ac.ox.cs.pagoda.query.*;
import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
import uk.ac.ox.cs.pagoda.util.Utility;
import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;

import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

class HermiTReasoner extends QueryReasoner {

    Reasoner hermit;

    BasicQueryEngine upperStore = null;

    OWLOntology onto;
    OWLDataFactory factory;

    String importedOntologyPath = null;

    QueryRoller roller;
    boolean toCheckSatisfiability;

    public HermiTReasoner(boolean toCheckSatisfiability) {
        this.toCheckSatisfiability = toCheckSatisfiability;
    }

    @Override
    public void loadOntology(OWLOntology ontology) {
        if(isDisposed()) throw new DisposedException();
        onto = ontology;
    }

    @Override
    public boolean preprocess() {
        if(isDisposed()) throw new DisposedException();
        OWLOntology tbox = onto;
        try {
            onto = OWLHelper.getImportedOntology(tbox, getImportedData().split(ImportDataFileSeparator));
            importedOntologyPath = OWLHelper.getOntologyPath(onto);
        } catch(OWLOntologyCreationException | OWLOntologyStorageException | IOException e) {
            e.printStackTrace();
        }

        DatalogProgram datalogProgram = new DatalogProgram(tbox);
        importData(datalogProgram.getAdditionalDataFile());
        upperStore = new MultiStageQueryEngine("rl-upper", false);
        upperStore.importRDFData("data", getImportedData());
        GapByStore4ID gap = new GapByStore4ID(upperStore);
        upperStore.materialiseFoldedly(datalogProgram, gap);
        gap.clear();

        factory = onto.getOWLOntologyManager().getOWLDataFactory();
        roller = new QueryRoller(factory);

        hermit = new Reasoner(onto);
        return isConsistent();
    }

    @Override
    public boolean isConsistent() {
        if(isDisposed()) throw new DisposedException();
        if(toCheckSatisfiability)
            return hermit.isConsistent();
        return true;
    }

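    /*
     * Evaluates a query with a single answer variable by rolling it up into an
     * OWL class expression and asking HermiT one instance-checking (entailment)
     * question per named individual; the entailed individuals form a sound
     * lower bound. Queries with more than one answer variable are not yet
     * handled (see the FIXME below).
     */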
    @Override
    public void evaluate(QueryRecord record) {
        if(isDisposed()) throw new DisposedException();
        String[] disVars = record.getDistinguishedVariables();
        Set<OWLNamedIndividual> individuals = onto.getIndividualsInSignature(true);
        if(disVars.length == 1) {
            OWLClassExpression clsExp = roller.rollUp(record.getClause(), record.getAnswerVariables()[0]);
            Set<AnswerTuple> answers = new HashSet<AnswerTuple>();
            for(OWLNamedIndividual individual : individuals) {
                Utility.logDebug("checking ... " + individual);
                if(hermit.isEntailed(factory.getOWLClassAssertionAxiom(clsExp, individual))) {
                    answers.add(new AnswerTuple(new Individual[]{Individual.create(individual.toStringID())}));
                }
            }
            record.updateLowerBoundAnswers(new AnswerTuplesImp(record.getAnswerVariables(), answers));
            record.markAsProcessed();
        }
        else {
            // FIXME join here
            record.markAsProcessed();
        }
    }

    @Override
    public void evaluateUpper(QueryRecord record) {
        if(isDisposed()) throw new DisposedException();
        AnswerTuples rlAnswer = null;
        try {
            rlAnswer = upperStore.evaluate(record.getQueryText(), record.getAnswerVariables());
            record.updateUpperBoundAnswers(rlAnswer, true);
        } finally {
            if(rlAnswer != null) rlAnswer.dispose();
        }
    }

    @Override
    public void dispose() {
        super.dispose();
        if(importedOntologyPath != null) {
            File tmp = new File(importedOntologyPath);
            if(tmp.exists()) tmp.delete();
        }
    }

}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java
new file mode 100644
index 0000000..7847e7c
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java
@@ -0,0 +1,117 @@
package uk.ac.ox.cs.pagoda.reasoner;

import org.semanticweb.owlapi.model.OWLOntology;
import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
import uk.ac.ox.cs.pagoda.constraints.UpperUnaryBottom;
import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
import uk.ac.ox.cs.pagoda.query.AnswerTuples;
import uk.ac.ox.cs.pagoda.query.QueryRecord;
import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
import uk.ac.ox.cs.pagoda.rules.GeneralProgram;
import uk.ac.ox.cs.pagoda.tracking.QueryTracker;
import uk.ac.ox.cs.pagoda.util.Utility;

import java.io.File;

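/*
 * Iteratively tightens the upper bound for a hard query: materialise a program
 * built from the query's relevant clauses and fragment, re-evaluate the query,
 * and, while the upper bound keeps shrinking, re-extract an even smaller
 * relevant fragment, up to a fixed depth limit (currently 1).
 */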
public class IterativeRefinement {

    private static final int depthLimit = 1;

    QueryRecord m_record;
    QueryTracker m_tracker;
    BasicQueryEngine m_trackingStore;
    QueryRecord[] botQueryRecords;

    int m_depth = 0;
    String tempDataFile = "temp.ttl";

    public IterativeRefinement(QueryRecord queryRecord, QueryTracker tracker, BasicQueryEngine trackingStore, QueryRecord[] botQueryRecords) {
        m_record = queryRecord;
        m_tracker = tracker;
        m_trackingStore = trackingStore;
        this.botQueryRecords = botQueryRecords;
    }

    public OWLOntology extractWithFullABox(String dataset, BottomStrategy upperBottom) {
        GeneralProgram program;
        boolean update;
        while (m_depth < depthLimit) {
            ++m_depth;
            program = new GeneralProgram(m_record.getRelevantClauses(), m_record.getRelevantOntology());

            MultiStageQueryEngine tEngine = new MultiStageQueryEngine("query-tracking", true);
            try {
                tEngine.importRDFData("data", dataset);
                if (tEngine.materialise4SpecificQuery(program, m_record, upperBottom) != 1) {
                    return m_record.getRelevantOntology();
                }

                AnswerTuples ans = null;
                try {
                    ans = tEngine.evaluate(m_record.getQueryText());
                    update = m_record.updateUpperBoundAnswers(ans);
                } finally {
                    if (ans != null) ans.dispose();
                }
            } finally {
                tEngine.dispose();
            }

            if(m_record.isProcessed())
                return null;

            if (!update) break;

            m_record.updateSubID();
            m_tracker.extract(m_trackingStore, botQueryRecords, true);
        }

        return m_record.getRelevantOntology();
    }

    public OWLOntology extract(UpperUnaryBottom upperBottom) {
        GeneralProgram program;
        boolean update;
        while (m_depth < depthLimit) {
            m_record.saveABoxInTurtle(tempDataFile);
            program = new GeneralProgram(m_record.getRelevantClauses(), m_record.getRelevantOntology());

            MultiStageQueryEngine tEngine = new MultiStageQueryEngine("query-tracking", true);
            try {
                tEngine.importRDFData("fragment abox", tempDataFile);
                if (tEngine.materialise4SpecificQuery(program, m_record, upperBottom) != 1) {
                    return m_record.getRelevantOntology();
                }

                AnswerTuples ans = null;
                try {
                    ans = tEngine.evaluate(m_record.getQueryText());
                    update = m_record.updateUpperBoundAnswers(ans);
                } finally {
                    if (ans != null) ans.dispose();
                }
            } finally {
                tEngine.dispose();
            }

            if(m_record.isProcessed())
                return null;

            if (!update) break;

            m_record.updateSubID();
            m_tracker.extract(m_trackingStore, botQueryRecords, true);
        }

        return m_record.getRelevantOntology();
    }

    public void dispose() {
        File file = new File(tempDataFile);
        if (file.exists()) {
            file.delete();
            Utility.logDebug(file.getAbsolutePath() + " is deleted.");
        }
    }

}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
new file mode 100644
index 0000000..f2e29f2
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
@@ -0,0 +1,473 @@
package uk.ac.ox.cs.pagoda.reasoner;

import org.semanticweb.karma2.profile.ELHOProfile;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.parameters.Imports;
import uk.ac.ox.cs.JRDFox.JRDFStoreException;
import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator;
import uk.ac.ox.cs.pagoda.owl.OWLHelper;
import uk.ac.ox.cs.pagoda.query.AnswerTuples;
import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
import uk.ac.ox.cs.pagoda.query.GapByStore4ID2;
import uk.ac.ox.cs.pagoda.query.QueryRecord;
import uk.ac.ox.cs.pagoda.query.QueryRecord.Step;
import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine;
import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter;
import uk.ac.ox.cs.pagoda.tracking.QueryTracker;
import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder;
import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderDisjVar1;
import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderWithGap;
import uk.ac.ox.cs.pagoda.util.ExponentialInterpolation;
import uk.ac.ox.cs.pagoda.util.PagodaProperties;
import uk.ac.ox.cs.pagoda.util.Timer;
import uk.ac.ox.cs.pagoda.util.Utility;
import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
import uk.ac.ox.cs.pagoda.util.tuples.Tuple;

import java.util.Collection;
import java.util.LinkedList;

public class MyQueryReasoner extends QueryReasoner {

    OWLOntology ontology;
    OWLOntology elho_ontology;
    DatalogProgram program;

    BasicQueryEngine rlLowerStore = null;
    KarmaQueryEngine elLowerStore = null;
    MultiStageQueryEngine lazyUpperStore = null;
    MultiStageQueryEngine trackingStore = null;
    TrackingRuleEncoder encoder;

    private boolean equalityTag;
    private Timer t = new Timer();

    private Collection<String> predicatesWithGap = null;
    private ConsistencyStatus isConsistent;
    private ConsistencyManager consistency = new ConsistencyManager(this);
//    private int relevantOntologiesCounter = 0;

    public MyQueryReasoner() {
        setup(true);
    }

    public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) {
        if(!multiStageTag)
            throw new IllegalArgumentException(
                    "Value \"false\" for parameter \"multiStageTag\" is no longer supported");

        setup(considerEqualities);
    }

    @Override
    public void loadOntology(OWLOntology o) {
        if(isDisposed()) throw new DisposedException();
        if(!equalityTag) {
            EqualitiesEliminator eliminator = new EqualitiesEliminator(o);
            o = eliminator.getOutputOntology();
            eliminator.save();
        }

        ontology = o;
        program = new DatalogProgram(ontology);
//        program.getLower().save();
//        program.getUpper().save();
        program.getGeneral().save();

        if(!program.getGeneral().isHorn())
            lazyUpperStore = new MultiStageQueryEngine("lazy-upper-bound", true);

        importData(program.getAdditionalDataFile());

        elho_ontology = new ELHOProfile().getFragment(ontology);
        elLowerStore.processOntology(elho_ontology);
    }

    public Collection<String> getPredicatesWithGap() {
        if(isDisposed()) throw new DisposedException();
        return predicatesWithGap;
    }

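    /*
     * Preprocessing pipeline: import the data and materialise the sound RL and
     * ELHO (Karma) lower-bound stores, checking for inconsistency after each;
     * materialise the lazy upper-bound store for non-Horn programs; then build
     * the tracking store together with the gap (answers in the upper bound but
     * not in the lower bound) and choose a tracking-rule encoder for the
     * fragment extraction used during query answering.
     */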
    @Override
    public boolean preprocess() {
        if(isDisposed()) throw new DisposedException();

        t.reset();
        Utility.logInfo("Preprocessing (and checking satisfiability)...");

        String name = "data", datafile = getImportedData();
        rlLowerStore.importRDFData(name, datafile);
        rlLowerStore.materialise("lower program", program.getLower().toString());
//        program.getLower().save();
        if(!consistency.checkRLLowerBound()) {
            Utility.logDebug("time for satisfiability checking: " + t.duration());
            isConsistent = ConsistencyStatus.INCONSISTENT;
            return false;
        }
        Utility.logDebug("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber());

        String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology);

        elLowerStore.importRDFData(name, datafile);
        elLowerStore.materialise("saturate named individuals", originalMarkProgram);
        elLowerStore.materialise("lower program", program.getLower().toString());
        elLowerStore.initialiseKarma();
        if(!consistency.checkELLowerBound()) {
            Utility.logDebug("time for satisfiability checking: " + t.duration());
            isConsistent = ConsistencyStatus.INCONSISTENT;
            return false;
        }

        if(lazyUpperStore != null) {
            lazyUpperStore.importRDFData(name, datafile);
            lazyUpperStore.materialise("saturate named individuals", originalMarkProgram);
            int tag = lazyUpperStore.materialiseRestrictedly(program, null);
            if(tag == -1) {
                Utility.logDebug("time for satisfiability checking: " + t.duration());
                isConsistent = ConsistencyStatus.INCONSISTENT;
                return false;
            }
            else if(tag != 1) {
                lazyUpperStore.dispose();
                lazyUpperStore = null;
            }
        }
        if(consistency.checkUpper(lazyUpperStore)) {
            isConsistent = ConsistencyStatus.CONSISTENT;
            Utility.logDebug("time for satisfiability checking: " + t.duration());
        }

        trackingStore.importRDFData(name, datafile);
        trackingStore.materialise("saturate named individuals", originalMarkProgram);

//        materialiseFullUpper();
//        GapByStore4ID gap = new GapByStore4ID(trackingStore);
        GapByStore4ID gap = new GapByStore4ID2(trackingStore, rlLowerStore);
        trackingStore.materialiseFoldedly(program, gap);
        predicatesWithGap = gap.getPredicatesWithGap();
        gap.clear();

        if(program.getGeneral().isHorn())
            encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore);
        else
            encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore);
//        encoder = new TrackingRuleEncoderDisj1(program.getUpper(), trackingStore);
//        encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore);
//        encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore);

        // TODO? add consistency check by Skolem-upper-bound

        if(!isConsistent())
            return false;

        consistency.extractBottomFragment();

        return true;
    }

    @Override
    public boolean isConsistent() {
        if(isDisposed()) throw new DisposedException();

        if(isConsistent == ConsistencyStatus.UNCHECKED) {
            isConsistent = consistency.check() ? ConsistencyStatus.CONSISTENT : ConsistencyStatus.INCONSISTENT;
            Utility.logDebug("time for satisfiability checking: " + t.duration());
        }
        if(isConsistent == ConsistencyStatus.CONSISTENT) {
            Utility.logInfo("The ontology is consistent!");
            return true;
        }
        else {
            Utility.logInfo("The ontology is inconsistent!");
            return false;
        }
    }

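    /*
     * Full query-answering pipeline: compute lower and upper bounds; if they
     * differ, extract the ontology subset relevant to the query, optionally
     * tighten the upper bound on a semi-Skolemised materialisation of that
     * subset (before or after summarisation, depending on the configuration),
     * then decide the remaining gap answers by summarisation and, as a last
     * resort, by full HermiT reasoning.
     */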
    @Override
    public void evaluate(QueryRecord queryRecord) {
        if(isDisposed()) throw new DisposedException();

        if(queryLowerAndUpperBounds(queryRecord))
            return;

        OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord);

//        queryRecord.saveRelevantOntology("/home/alessandro/Desktop/test-relevant-ontology-"+relevantOntologiesCounter+".owl");
//        relevantOntologiesCounter++;

        if(properties.getSkolemUpperBound() == PagodaProperties.SkolemUpperBoundOptions.BEFORE_SUMMARISATION
                && querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) {
            return;
        }

        Utility.logInfo(">> Summarisation <<");
        HermitSummaryFilter summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT());
        if(summarisedChecker.check(queryRecord.getGapAnswers()) == 0) {
            summarisedChecker.dispose();
            return;
        }

        if(properties.getSkolemUpperBound() == PagodaProperties.SkolemUpperBoundOptions.AFTER_SUMMARISATION
                && querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) {
            summarisedChecker.dispose();
            return;
        }

        Utility.logInfo(">> Full reasoning <<");
        Timer t = new Timer();
        summarisedChecker.checkByFullReasoner(queryRecord.getGapAnswers());
        Utility.logDebug("Total time for full reasoner: " + t.duration());

        if(properties.getToCallHermiT())
            queryRecord.markAsProcessed();
        summarisedChecker.dispose();
    }

    @Override
    public void evaluateUpper(QueryRecord queryRecord) {
        if(isDisposed()) throw new DisposedException();
        // TODO? add new upper store
        AnswerTuples rlAnswer = null;
        boolean useFull = queryRecord.isBottom() || lazyUpperStore == null;
        try {
            rlAnswer =
                    (useFull ? trackingStore : lazyUpperStore).evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
            queryRecord.updateUpperBoundAnswers(rlAnswer, true);
        } finally {
            if(rlAnswer != null) rlAnswer.dispose();
        }
    }

    @Override
    public void dispose() {
        super.dispose();

        if(encoder != null) encoder.dispose();
        if(rlLowerStore != null) rlLowerStore.dispose();
        if(lazyUpperStore != null) lazyUpperStore.dispose();
        if(elLowerStore != null) elLowerStore.dispose();
        if(trackingStore != null) trackingStore.dispose();
        if(consistency != null) consistency.dispose();
        if(program != null) program.dispose();
    }

    private void setup(boolean considerEqualities) {
        if(isDisposed()) throw new DisposedException();

        isConsistent = ConsistencyStatus.UNCHECKED;
        this.equalityTag = considerEqualities;

        rlLowerStore = new BasicQueryEngine("rl-lower-bound");
        elLowerStore = new KarmaQueryEngine("elho-lower-bound");

        trackingStore = new MultiStageQueryEngine("tracking", false);
    }

    protected void internal_importDataFile(String name, String datafile) {
//        addDataFile(datafile);
        rlLowerStore.importRDFData(name, datafile);
        if(lazyUpperStore != null)
            lazyUpperStore.importRDFData(name, datafile);
        elLowerStore.importRDFData(name, datafile);
        trackingStore.importRDFData(name, datafile);
    }

    /**
     * Deals with blank nodes differently from variables,
     * according to the SPARQL semantics for the OWL 2 entailment regime.
     * <p>
     * In particular, variables are matched only against named individuals,
     * while blank nodes are matched against both named and anonymous individuals.
     */
    private boolean queryUpperStore(BasicQueryEngine upperStore, QueryRecord queryRecord,
                                    Tuple<String> extendedQuery, Step step) {
        t.reset();

        Utility.logDebug("First query type");
        queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables());
        if(!queryRecord.isProcessed() && !queryRecord.getQueryText().equals(extendedQuery.get(0))) {
            Utility.logDebug("Second query type");
            queryUpperBound(upperStore, queryRecord, extendedQuery.get(0), queryRecord.getAnswerVariables());
        }
        if(!queryRecord.isProcessed() && queryRecord.hasNonAnsDistinguishedVariables()) {
            Utility.logDebug("Third query type");
            queryUpperBound(upperStore, queryRecord, extendedQuery.get(1), queryRecord.getDistinguishedVariables());
        }

        queryRecord.addProcessingTime(step, t.duration());
        if(queryRecord.isProcessed()) {
            queryRecord.setDifficulty(step);
            return true;
        }
        return false;
    }

    /**
     * Returns the part of the ontology relevant for HermiT, while computing the bound answers.
     */
    private boolean queryLowerAndUpperBounds(QueryRecord queryRecord) {

        Utility.logInfo(">> Base bounds <<");

        AnswerTuples rlAnswer = null, elAnswer = null;

        t.reset();
        try {
            rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
            Utility.logDebug(t.duration());
            queryRecord.updateLowerBoundAnswers(rlAnswer);
        } finally {
            if(rlAnswer != null) rlAnswer.dispose();
        }
        queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration());

        Tuple<String> extendedQueryTexts = queryRecord.getExtendedQueryText();

        if(properties.getUseAlwaysSimpleUpperBound() || lazyUpperStore == null) {
            Utility.logDebug("Tracking store");
            if(queryUpperStore(trackingStore, queryRecord, extendedQueryTexts, Step.SIMPLE_UPPER_BOUND))
                return true;
        }

        if(!queryRecord.isBottom()) {
            Utility.logDebug("Lazy store");
            if(lazyUpperStore != null && queryUpperStore(lazyUpperStore, queryRecord, extendedQueryTexts, Step.LAZY_UPPER_BOUND))
                return true;
        }

        t.reset();
        try {
            elAnswer = elLowerStore.evaluate(extendedQueryTexts.get(0),
                    queryRecord.getAnswerVariables(),
                    queryRecord.getLowerBoundAnswers());
            Utility.logDebug(t.duration());
            queryRecord.updateLowerBoundAnswers(elAnswer);
        } finally {
            if(elAnswer != null) elAnswer.dispose();
        }
        queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration());

        if(queryRecord.isProcessed()) {
            queryRecord.setDifficulty(Step.EL_LOWER_BOUND);
            return true;
        }

        return false;
    }

    private OWLOntology extractRelevantOntologySubset(QueryRecord queryRecord) {
        Utility.logInfo(">> Relevant ontology-subset extraction <<");

        t.reset();

        QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord);
        OWLOntology relevantOntologySubset = tracker.extract(trackingStore, consistency.getQueryRecords(), true);

        queryRecord.addProcessingTime(Step.FRAGMENT, t.duration());

        int numOfABoxAxioms = relevantOntologySubset.getABoxAxioms(Imports.INCLUDED).size();
        int numOfTBoxAxioms = relevantOntologySubset.getAxiomCount() - numOfABoxAxioms;
        Utility.logInfo("Relevant ontology-subset has been extracted: |ABox|="
                + numOfABoxAxioms + ", |TBox|=" + numOfTBoxAxioms);

        return relevantOntologySubset;
    }

    private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) {
        AnswerTuples rlAnswer = null;
        try {
            Utility.logDebug(queryText);
            rlAnswer = upperStore.evaluate(queryText, answerVariables);
            Utility.logDebug(t.duration());
            queryRecord.updateUpperBoundAnswers(rlAnswer);
        } finally {
            if(rlAnswer != null) rlAnswer.dispose();
        }
    }

    private boolean querySkolemisedRelevantSubset(OWLOntology relevantSubset, QueryRecord queryRecord) {
        Utility.logInfo(">> Semi-Skolemisation <<");
        t.reset();

        DatalogProgram relevantProgram = new DatalogProgram(relevantSubset);

        MultiStageQueryEngine relevantStore =
                new MultiStageQueryEngine("Relevant-store", true); // checkValidity is true

        relevantStore.importDataFromABoxOf(relevantSubset);
        String relevantOriginalMarkProgram = OWLHelper.getOriginalMarkProgram(relevantSubset);

        relevantStore.materialise("Mark original individuals", relevantOriginalMarkProgram);

        boolean isFullyProcessed = false;
        LinkedList<Tuple<Long>> lastTwoTriplesCounts = new LinkedList<>();
        for (int currentMaxTermDepth = 1; !isFullyProcessed; currentMaxTermDepth++) {

            if(currentMaxTermDepth > properties.getSkolemDepth()) {
                Utility.logInfo("Maximum term depth reached");
                break;
            }

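            // Estimate growth before materialising at the next depth: fit an
            // exponential curve through the last two (depth, store size) points
            // and stop early if the store size is flat (no new terms) or the
            // extrapolated size would exceed the configured triples limit.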
            if(lastTwoTriplesCounts.size() == 2) {
                if(lastTwoTriplesCounts.get(0).get(1).equals(lastTwoTriplesCounts.get(1).get(1)))
                    break;

                ExponentialInterpolation interpolation = new ExponentialInterpolation(lastTwoTriplesCounts.get(0).get(0),
                        lastTwoTriplesCounts.get(0).get(1),
                        lastTwoTriplesCounts.get(1).get(0),
                        lastTwoTriplesCounts.get(1).get(1));
                double triplesEstimate = interpolation.computeValue(currentMaxTermDepth);

                Utility.logDebug("Estimate of the number of triples: " + triplesEstimate);

                // exit condition if the query is not fully answered
                if(triplesEstimate > properties.getMaxTriplesInSkolemStore()) {
                    Utility.logInfo("Interrupting Semi-Skolemisation because of triples count limit");
                    break;
                }
            }

            Utility.logInfo("Trying with maximum depth " + currentMaxTermDepth);

            int materialisationTag = relevantStore.materialiseSkolemly(relevantProgram, null,
                    currentMaxTermDepth);
            queryRecord.addProcessingTime(Step.SKOLEM_UPPER_BOUND, t.duration());
            if(materialisationTag == -1) {
                relevantStore.dispose();
                throw new Error("A consistent ontology has turned out to be " +
                        "inconsistent in the Skolemised relevant upper store");
            }
            else if(materialisationTag != 1) {
                Utility.logInfo("Semi-Skolemised relevant upper store cannot be employed");
                break;
            }

            Utility.logInfo("Querying semi-Skolemised upper store...");
            isFullyProcessed = queryUpperStore(relevantStore, queryRecord,
                    queryRecord.getExtendedQueryText(),
                    Step.SKOLEM_UPPER_BOUND);

            try {
                lastTwoTriplesCounts.add
                        (new Tuple<>((long) currentMaxTermDepth, relevantStore.getStoreSize()));
            } catch (JRDFStoreException e) {
                e.printStackTrace();
                break;
            }
            if(lastTwoTriplesCounts.size() > 2)
                lastTwoTriplesCounts.remove();

            Utility.logDebug("Last two triples counts: " + lastTwoTriplesCounts);
        }

        relevantStore.dispose();
        Utility.logInfo("Semi-Skolemised relevant upper store has been evaluated");
        return isFullyProcessed;
    }

    private enum ConsistencyStatus {CONSISTENT, INCONSISTENT, UNCHECKED}

}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java
new file mode 100644
index 0000000..3200216
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/QueryEngine.java
@@ -0,0 +1,15 @@
package uk.ac.ox.cs.pagoda.reasoner;

import uk.ac.ox.cs.pagoda.query.AnswerTuples;
import uk.ac.ox.cs.pagoda.util.disposable.Disposable;

import java.util.Collection;

public abstract class QueryEngine extends Disposable {

    public abstract void evaluate(Collection<String> queryTexts, String answerFile);

    public abstract AnswerTuples evaluate(String queryText);

    public abstract AnswerTuples evaluate(String queryText, String[] answerVariables);
}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
new file mode 100644
index 0000000..e8daa3b
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
@@ -0,0 +1,266 @@
package uk.ac.ox.cs.pagoda.reasoner;

import com.google.gson.Gson;
import org.semanticweb.owlapi.model.OWLOntology;
import uk.ac.ox.cs.pagoda.owl.OWLHelper;
import uk.ac.ox.cs.pagoda.query.AnswerTuples;
import uk.ac.ox.cs.pagoda.query.QueryManager;
import uk.ac.ox.cs.pagoda.query.QueryRecord;
import uk.ac.ox.cs.pagoda.util.PagodaProperties;
import uk.ac.ox.cs.pagoda.util.Timer;
import uk.ac.ox.cs.pagoda.util.Utility;
import uk.ac.ox.cs.pagoda.util.disposable.Disposable;
import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;

import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collection;

// TODO clean APIs
public abstract class QueryReasoner extends Disposable {

    public static final String ImportDataFileSeparator = ";";
    private static final boolean DEFAULT_MULTI_STAGES = true;
    private static final boolean DEFAULT_EQUALITIES = true;
    public boolean fullReasoner = this instanceof MyQueryReasoner;
//    protected boolean forSemFacet = false;
    PagodaProperties properties;
    BufferedWriter answerWriter = null;
    private StringBuilder importedData = new StringBuilder();
    private QueryManager m_queryManager = new QueryManager();

    public static QueryReasoner getInstance(PagodaProperties p) {
        OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath());
        QueryReasoner pagoda = getInstance(ontology, p);
        pagoda.properties = p;
        pagoda.loadOntology(ontology);
        pagoda.importData(p.getDataPath());
        if(pagoda.preprocess()) {
            Utility.logInfo("The ontology is consistent!");
            return pagoda;
        }
        else {
            System.out.println("The ontology is inconsistent!");
            pagoda.dispose();
            return null;
        }
    }

    public static QueryReasoner getInstance(OWLOntology o) {
        QueryReasoner pagoda = getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES);
        pagoda.properties = new PagodaProperties();
        return pagoda;
    }

    private static QueryReasoner getInstance(OWLOntology o, PagodaProperties p) {
        return getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES);
    }

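    /*
     * Selects the most specific reasoner for the ontology's profile: a plain
     * RL or ELHO reasoner when the ontology falls entirely within one of those
     * fragments (a single sound-and-complete engine then suffices), and
     * otherwise one of the RLU, ELHOU or full (MyQueryReasoner) variants,
     * as requested by the caller.
     */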
    public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) {
//        Utility.initialise();
        QueryReasoner reasoner;
        if(OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner();
        else if(OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner();
        else
            switch(type) {
                case RLU:
                    reasoner = new RLUQueryReasoner(performMultiStages, considerEqualities);
                    break;
                case ELHOU:
                    reasoner = new ELHOUQueryReasoner(performMultiStages, considerEqualities);
                    break;
                default:
                    reasoner = new MyQueryReasoner(performMultiStages, considerEqualities);
            }
        return reasoner;
    }

    public static QueryReasoner getHermiTReasoner(boolean toCheckSatisfiability) {
        return new HermiTReasoner(toCheckSatisfiability);
    }

    public void setToClassify(boolean flag) {
        if(isDisposed()) throw new DisposedException();
        properties.setToClassify(flag);
    }

    public void setToCallHermiT(boolean flag) {
        if(isDisposed()) throw new DisposedException();
        properties.setToCallHermiT(flag);
    }

    public void importData(String datafile) {
        if(isDisposed()) throw new DisposedException();
        if(datafile != null && !datafile.equalsIgnoreCase("null"))
            importData(datafile.split(ImportDataFileSeparator));
    }

    public void importData(String[] datafiles) {
        if(isDisposed()) throw new DisposedException();
        if(datafiles != null) {
            for(String datafile : datafiles) {
                File file = new File(datafile);
                if(file.exists()) {
                    if(file.isFile()) importDataFile(file);
                    else importDataDirectory(file);
                }
                else {
                    Utility.logError("warning: file " + datafile + " does not exist.");
                }
            }
        }
    }

    public abstract void loadOntology(OWLOntology ontology);

    public abstract boolean preprocess();

    public abstract boolean isConsistent();

    public abstract void evaluate(QueryRecord record);

    public abstract void evaluateUpper(QueryRecord record);

    public AnswerTuples evaluate(String queryText, boolean forFacetGeneration) {
        if(isDisposed()) throw new DisposedException();
        if(forFacetGeneration) {
            QueryRecord record = m_queryManager.create(queryText);
            Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText);
            if(!record.isProcessed())
                evaluateUpper(record);
//            AnswerTuples tuples = record.getUpperBoundAnswers();
//            for (AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) {
//                tuple = tuples.getTuple();
//                if (tuple.toString().contains("NC"))
//                    System.out.println(tuple.toString());
//            }
            return record.getUpperBoundAnswers();
        }
        else
            return evaluate(queryText);
    }

    public AnswerTuples evaluate(String queryText) {
        if(isDisposed()) throw new DisposedException();
        QueryRecord record = m_queryManager.create(queryText);
        Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText);
        if(!record.isProcessed())
            evaluate(record);
        AnswerTuples answer = record.getAnswers();
        record.dispose();
        return answer;

    }

    public void evaluate_shell(String queryText) {
        if(isDisposed()) throw new DisposedException();
        QueryRecord record = m_queryManager.create(queryText);
        Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText);
        if(!record.isProcessed())
            evaluate(record);
        Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple());
        record.dispose();

    }

    public void evaluate(Collection<QueryRecord> queryRecords) {
        if(isDisposed()) throw new DisposedException();
        if(!isConsistent()) {
            Utility.logDebug("The ontology and dataset are inconsistent.");
173 return;
174 }
175
176 if(properties.getAnswerPath() != null && answerWriter == null) {
177 try {
178 answerWriter = Files.newBufferedWriter(Paths.get(properties.getAnswerPath()));
179 } catch(IOException e) {
180 Utility.logError("The answer path is not valid!");
181 e.printStackTrace();
182 }
183 }
184
185 Timer t = new Timer();
186 Gson gson = QueryRecord.GsonCreator.getInstance();
187 for(QueryRecord record : queryRecords) {
188// if (Integer.parseInt(record.getQueryID()) != 218) continue;
189 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------",
190 record.getQueryText());
191 if(!record.isProcessed()) {
192 t.reset();
193 if(!record.isProcessed())
194 evaluate(record);
195 Utility.logInfo("Total time to answer this query: " + t.duration());
196 Utility.logInfo("Difficulty of this query: " + record.getDifficulty());
197 if(!fullReasoner && !record.isProcessed()) {
198 Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds.");
199 continue;
200 }
201 }
202 record.outputAnswerStatistics();
203 record.outputTimes();
204 }
205 /* TODO this currently supports a single call only:
206 calling it twice produces a JSON file with multiple roots */
207 if(answerWriter != null) gson.toJson(queryRecords, answerWriter);
208// queryRecords.stream().forEach(record -> Utility.logDebug(gson.toJson(record)));
209 queryRecords.stream().forEach(QueryRecord::dispose);
210 }
211
212 @Override
213 public void dispose() {
214 super.dispose();
215 if(answerWriter != null) {
216 try {
217 answerWriter.close();
218 } catch(IOException e) {
219 e.printStackTrace();
220 }
221 }
222// Utility.cleanup();
223 }
224
228
229 public QueryManager getQueryManager() {
230 if(isDisposed()) throw new DisposedException();
231 return m_queryManager;
232 }
233
234 protected String getImportedData() {
235 return importedData.toString();
236 }
237
238 private void importDataDirectory(File file) {
239 for(File child : file.listFiles())
240 if(child.isFile()) importDataFile(child);
241 else importDataDirectory(child);
242 }
243
244 private void importDataFile(File file) {
245 String datafile;
246 try {
247 datafile = file.getCanonicalPath();
248 } catch(IOException e) {
249 e.printStackTrace();
250 return;
251 }
252 importDataFile(datafile);
253 }
254
255 protected final void importDataFile(String datafile) {
256 if(importedData.length() == 0)
257 importedData.append(datafile);
258 else
259 importedData.append(ImportDataFileSeparator).append(datafile);
260
261 }
262
263
264 public enum Type {Full, RLU, ELHOU}
265
266}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java
new file mode 100644
index 0000000..32f5541
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java
@@ -0,0 +1,86 @@
1package uk.ac.ox.cs.pagoda.reasoner;
2
3import org.semanticweb.owlapi.model.OWLOntology;
4import uk.ac.ox.cs.pagoda.constraints.UnaryBottom;
5import uk.ac.ox.cs.pagoda.query.AnswerTuples;
6import uk.ac.ox.cs.pagoda.query.QueryRecord;
7import uk.ac.ox.cs.pagoda.query.QueryRecord.Step;
8import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
9import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine;
10import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram;
11import uk.ac.ox.cs.pagoda.util.Timer;
12import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
13
14public class RLQueryReasoner extends QueryReasoner {
15
16 RDFoxQueryEngine rlLowerStore = null;
17
18 LowerDatalogProgram program;
19 Timer t = new Timer();
20
21 public RLQueryReasoner() {
22 rlLowerStore = new BasicQueryEngine("rl");
23 }
24
25 @Override
26 public void evaluate(QueryRecord queryRecord) {
27 if(isDisposed()) throw new DisposedException();
28 AnswerTuples rlAnswer = null;
29 t.reset();
30 try {
31 rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
32 queryRecord.updateLowerBoundAnswers(rlAnswer);
33 } finally {
34 if (rlAnswer != null) rlAnswer.dispose();
35 }
36 queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration());
37 queryRecord.setDifficulty(Step.LOWER_BOUND);
38 queryRecord.markAsProcessed();
39 }
40
41 @Override
42 public void dispose() {
43 super.dispose();
44 if(rlLowerStore != null) rlLowerStore.dispose();
45 }
46
47 @Override
48 public void loadOntology(OWLOntology ontology) {
49 if(isDisposed()) throw new DisposedException();
50 program = new LowerDatalogProgram();
51 program.load(ontology, new UnaryBottom());
52 program.transform();
53
54 importData(program.getAdditionalDataFile());
55 }
56
57 @Override
58 public boolean preprocess() {
59 if(isDisposed()) throw new DisposedException();
60 rlLowerStore.importRDFData("data", getImportedData());
61 rlLowerStore.materialise("lower program", program.toString());
62
63 return isConsistent();
64 }
65
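    // The lower store is sound, so the data is consistent iff the bottom query
    // (QueryRecord.botQueryText) returns no answers.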
66 @Override
67 public boolean isConsistent() {
68 if(isDisposed()) throw new DisposedException();
69 AnswerTuples ans = null;
70 try {
71 ans = rlLowerStore.evaluate(QueryRecord.botQueryText, new String[] {"X"});
72 return !ans.isValid();
73 } finally {
74 if (ans != null) ans.dispose();
75
76 }
77
78 }
79
80 @Override
81 public void evaluateUpper(QueryRecord record) {
82 if(isDisposed()) throw new DisposedException();
83 evaluate(record);
84 }
85
86}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java
new file mode 100644
index 0000000..368fbb2
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java
@@ -0,0 +1,135 @@
1package uk.ac.ox.cs.pagoda.reasoner;
2
3import org.semanticweb.owlapi.model.OWLOntology;
4import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
5import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator;
6import uk.ac.ox.cs.pagoda.query.AnswerTuples;
7import uk.ac.ox.cs.pagoda.query.QueryRecord;
8import uk.ac.ox.cs.pagoda.query.QueryRecord.Step;
9import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
10import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
11import uk.ac.ox.cs.pagoda.util.Timer;
12import uk.ac.ox.cs.pagoda.util.Utility;
13import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
14
15class RLUQueryReasoner extends QueryReasoner {
16
17 DatalogProgram program;
18
19 BasicQueryEngine rlLowerStore, rlUpperStore;
20
21 boolean multiStageTag, equalityTag;
22 Timer t = new Timer();
23
24 public RLUQueryReasoner(boolean multiStageTag, boolean considerEqualities) {
25 this.multiStageTag = multiStageTag;
26 this.equalityTag = considerEqualities;
27 rlLowerStore = new BasicQueryEngine("rl-lower-bound");
28 if(!multiStageTag)
29 rlUpperStore = new BasicQueryEngine("rl-upper-bound");
30 else
31 rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false);
32 }
33
34 @Override
35 public void evaluate(QueryRecord queryRecord) {
36 if(isDisposed()) throw new DisposedException();
37 AnswerTuples ans = null;
38 t.reset();
39 try {
40 ans = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
41 Utility.logDebug(t.duration());
42 queryRecord.updateLowerBoundAnswers(ans);
43 } finally {
44 if (ans != null) ans.dispose();
45 }
46 queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration());
47
48 ans = null;
49 t.reset();
50 try {
51 ans = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
52 Utility.logDebug(t.duration());
53 queryRecord.updateUpperBoundAnswers(ans);
54 } finally {
55 if (ans != null) ans.dispose();
56 }
57 queryRecord.addProcessingTime(Step.UPPER_BOUND, t.duration());
58
59 if(queryRecord.isProcessed())
60 queryRecord.setDifficulty(Step.UPPER_BOUND);
61 }
62
63 @Override
64 public void evaluateUpper(QueryRecord queryRecord) {
65 if(isDisposed()) throw new DisposedException();
66 AnswerTuples ans = null;
67 try {
68 ans = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
69 Utility.logDebug(t.duration());
70 queryRecord.updateUpperBoundAnswers(ans, true);
71 } finally {
72 if (ans != null) ans.dispose();
73 }
74 }
75
76 @Override
77 public void dispose() {
78 super.dispose();
79 if (rlLowerStore != null) rlLowerStore.dispose();
80 if (rlUpperStore != null) rlUpperStore.dispose();
81 }
82
83 @Override
84 public void loadOntology(OWLOntology o) {
85 if(isDisposed()) throw new DisposedException();
86 if (!equalityTag) {
87 EqualitiesEliminator eliminator = new EqualitiesEliminator(o);
88 o = eliminator.getOutputOntology();
89 eliminator.save();
90 }
91
92 OWLOntology ontology = o;
93 program = new DatalogProgram(ontology);
94 importData(program.getAdditionalDataFile());
95 }
96
97 @Override
98 public boolean preprocess() {
99 if(isDisposed()) throw new DisposedException();
100 String datafile = getImportedData();
101 rlLowerStore.importRDFData("data", datafile);
102 rlLowerStore.materialise("lower program", program.getLower().toString());
103
104 rlUpperStore.importRDFData("data", datafile);
105 rlUpperStore.materialiseRestrictedly(program, null);
106
107 return isConsistent();
108
109 }
110
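    // Three-way consistency test: an answer to the bottom query in the sound
    // lower store proves inconsistency; no answer in the complete upper store
    // proves consistency; otherwise the status is undetermined and the data is
    // optimistically reported as consistent.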
111 @Override
112 public boolean isConsistent() {
113 if(isDisposed()) throw new DisposedException();
114 String[] X = new String[] { "X" };
115 AnswerTuples ans = null;
116 try {
117 ans = rlLowerStore.evaluate(QueryRecord.botQueryText, X);
118 if (ans.isValid()) return false;
119 } finally {
120 if (ans != null) ans.dispose();
121 }
122
123 ans = null;
124 try {
125 ans = rlUpperStore.evaluate(QueryRecord.botQueryText, X);
126 if (!ans.isValid()) return true;
127 } finally {
128 if (ans != null) ans.dispose();
129 }
130
131 Utility.logDebug("The consistency of the data has not been determined yet.");
132 return true;
133 }
134
135}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java
new file mode 100644
index 0000000..07adc6d
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/full/Checker.java
@@ -0,0 +1,17 @@
1package uk.ac.ox.cs.pagoda.reasoner.full;
2
3import uk.ac.ox.cs.pagoda.query.AnswerTuple;
4import uk.ac.ox.cs.pagoda.query.AnswerTuples;
5import uk.ac.ox.cs.pagoda.util.disposable.Disposable;
6
7public abstract class Checker extends Disposable {
8
9 public abstract int check(AnswerTuples answers);
10
11 public abstract boolean check(AnswerTuple answer);
12
13 public abstract boolean isConsistent();
14
15 public abstract int getNoOfCalls();
16
17}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/full/HermitChecker.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/full/HermitChecker.java
new file mode 100644
index 0000000..3f3c22d
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/full/HermitChecker.java
@@ -0,0 +1,282 @@
1package uk.ac.ox.cs.pagoda.reasoner.full;
2
3import org.semanticweb.HermiT.Reasoner;
4import org.semanticweb.HermiT.model.DLClause;
5import org.semanticweb.HermiT.model.Term;
6import org.semanticweb.HermiT.model.Variable;
7import org.semanticweb.owlapi.model.*;
8import org.semanticweb.owlapi.model.parameters.Imports;
9import uk.ac.ox.cs.pagoda.endomorph.Clique;
10import uk.ac.ox.cs.pagoda.endomorph.DependencyGraph;
11import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
12import uk.ac.ox.cs.pagoda.query.AnswerTuple;
13import uk.ac.ox.cs.pagoda.query.AnswerTuples;
14import uk.ac.ox.cs.pagoda.query.QueryRecord;
15import uk.ac.ox.cs.pagoda.query.rollup.QueryGraph;
16import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper;
17import uk.ac.ox.cs.pagoda.util.Namespace;
18import uk.ac.ox.cs.pagoda.util.Timer;
19import uk.ac.ox.cs.pagoda.util.Utility;
20import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
21
22import java.util.HashMap;
23import java.util.HashSet;
24import java.util.Map;
25import java.util.Set;
26
27public class HermitChecker extends Checker {
28
29 protected OWLDataFactory factory;
30 protected String[][] answerVariable;
31 protected OWLOntology ontology;
32 protected QueryRecord record;
33 protected QueryGraph qGraph = null;
34 boolean toCheck = true;
35 AnswerTuple topAnswerTuple = null, botAnswerTuple = null;
36 private String queryText;
37 private DLClause queryClause;
38 private Reasoner hermit;
39 private int tag = 0;
40
41 public int getNoOfCalls() {
42 if(isDisposed()) throw new DisposedException();
43 return noOfCalls;
44 }
45
46 private int noOfCalls = 0;
47 private DependencyGraph dGraph = null;
48
49 public HermitChecker(Checker checker) {
50 if(checker instanceof HermitChecker) {
51 HermitChecker other = (HermitChecker) checker;
52 factory = other.factory;
53 queryText = other.queryText;
54 queryClause = other.queryClause;
55 answerVariable = other.answerVariable;
56 ontology = other.ontology;
57// record = other.record;
58 }
59
60 hermit = new Reasoner(ontology);
61 }
62
63 public HermitChecker(OWLOntology ontology, QueryRecord record, boolean toCheck) {
64 this.ontology = ontology;
65 queryText = record.getQueryText();
66 answerVariable = record.getVariables();
67 queryClause = record.getClause();
68// this.record = record;
69 this.toCheck = toCheck;
70 }
71
72 public HermitChecker(OWLOntology ontology, String queryText) {
73 this.ontology = ontology;
74 this.queryText = queryText;
75 answerVariable = queryText == null ? null : ConjunctiveQueryHelper.getAnswerVariables(queryText);
76 queryClause = DLClauseHelper.getQuery(queryText, null);
77// this.record = null;
78 }
79
80 @Override
81 public int check(AnswerTuples answers) {
82 if(isDisposed()) throw new DisposedException();
83
84 if(hermit == null) initialiseReasoner();
85 int answerCounter = 0, counter = 0;
86 for(; answers.isValid(); answers.moveNext()) {
87 ++counter;
88 if(check(answers.getTuple())) ++answerCounter;
89 }
90 answers.dispose();
91
92 Utility.logDebug("The number of answer tuples checked by HermiT: " + counter,
93 "The number of correct answers: " + answerCounter);
94 return answerCounter;
95 }
96
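    // Checks a single candidate tuple with HermiT: the query is rolled up into
    // assertions under the tuple's variable assignment; existential parts are
    // tested via consistency of the extended ontology, the remaining assertions
    // via entailment. A non-zero tag short-circuits the call using the earlier
    // top/bot representative checks.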
97 @Override
98 public boolean check(AnswerTuple answerTuple) {
99 if(isDisposed()) throw new DisposedException();
100
101 if(!toCheck) return false;
102 ++noOfCalls;
103 if(tag != 0) return tag == 1;
104 if(hermit == null) initialiseReasoner();
105
106 Timer t = new Timer();
107 Map<Variable, Term> sub = answerTuple.getAssignment(answerVariable[1]);
108 Set<OWLAxiom> toCheckAxioms = qGraph.getAssertions(sub);
109
110 // TODO complete
111 Set<OWLAxiom> toCheckExistentialAxioms = qGraph.getExistentialAxioms(sub);
112
113 // TODO possibly inefficient
114 for(OWLAxiom subclassAxiom : toCheckExistentialAxioms) {
115 Utility.logDebug("Checking consistency of the ontology extended with " + subclassAxiom);
116 ontology.getOWLOntologyManager().addAxiom(ontology, subclassAxiom);
117 hermit.flush();
118 if(hermit.isConsistent()) {
119 ontology.getOWLOntologyManager().removeAxiom(ontology, subclassAxiom);
120 hermit.flush();
121 Utility.logDebug("@TIME to check one tuple: " + t.duration());
122 return false;
123 }
124 ontology.getOWLOntologyManager().removeAxiom(ontology, subclassAxiom);
125 hermit.flush();
126 }
127
128
129// for (OWLAxiom axiom: toCheckAxioms) System.out.println(axiom.toString());
130
131// Utility.logInfo(toCheckAxioms);
132
133 if(hermit.isEntailed(toCheckAxioms)) {
134 Utility.logDebug("@TIME to check one tuple: " + t.duration());
135 return true;
136 }
137 Utility.logDebug("@TIME to check one tuple: " + t.duration());
138 return false;
139 }
140
141 @Override
142 public boolean isConsistent() {
143 if(isDisposed()) throw new DisposedException();
144
145 if(hermit == null) initialiseReasoner();
146 return hermit.isConsistent();
147 }
148
149 public void dispose() {
150 super.dispose();
151
152 Utility.logDebug("Disposing of an instance of HermiT after " + noOfCalls + " calls");
153 if(hermit != null) hermit.dispose();
154 hermit = null;
155 }
156
157 public void setDependencyGraph(DependencyGraph dGraph) {
158 if(isDisposed()) throw new DisposedException();
159
160 this.dGraph = dGraph;
161 }
162
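    // Builds the query graph and the HermiT instance. When a dependency graph is
    // available for a single answer variable, two synthetic individuals are added:
    // "top" (carrying the union of all exit candidates' assertions) and "bot"
    // (carrying the intersection of all entrance candidates'). If even "top"
    // fails the check, every candidate can be rejected (tag = -1); if "bot"
    // passes, every candidate can be accepted (tag = 1).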
163 private void initialiseReasoner() {
164 qGraph = new QueryGraph(queryClause.getBodyAtoms(), answerVariable[1], ontology);
165 OWLOntologyManager manager = ontology.getOWLOntologyManager();
166 factory = manager.getOWLDataFactory();
167
168 if(hermit != null) hermit.dispose();
169
170 if(dGraph != null && answerVariable[1].length == 1 && (dGraph.getExits().size() > 1 || dGraph.getEntrances()
171 .size() > 1)) {
172 Set<OWLAxiom> topAxioms = new HashSet<OWLAxiom>();
173 Set<OWLAxiom> botAxioms = new HashSet<OWLAxiom>();
174 addTopAndBotTuple(topAxioms, botAxioms);
175 manager.addAxioms(ontology, topAxioms);
176 manager.addAxioms(ontology, botAxioms);
177 hermit = new Reasoner(ontology);
178 boolean topValid = true;
179 if(!hermit.isConsistent() || topAnswerTuple != null && (topValid = check(topAnswerTuple))) {
180 hermit.dispose();
181 manager.removeAxioms(ontology, topAxioms);
182 hermit = new Reasoner(ontology);
183 }
184 else {
185 if(!topValid) tag = -1;
186 else if(botAnswerTuple != null && check(botAnswerTuple)) tag = 1;
187 }
188 }
189 else
190 hermit = new Reasoner(ontology);
191 }
192
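    // Populates topAxioms with every ABox assertion made about any exit
    // representative, re-targeted at the fresh "top" individual, and botAxioms
    // with only those assertions shared by all entrance representatives.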
193 private void addTopAndBotTuple(Set<OWLAxiom> topAxioms, Set<OWLAxiom> botAxioms) {
194 String top_str = Namespace.PAGODA_ANONY + "top", bot_str = Namespace.PAGODA_ANONY + "bot";
195 topAnswerTuple =
196 new AnswerTuple(
197 new uk.ac.ox.cs.JRDFox.model.Individual[]{uk.ac.ox.cs.JRDFox.model.Individual.create(top_str)});
198 botAnswerTuple =
199 new AnswerTuple(
200 new uk.ac.ox.cs.JRDFox.model.Individual[]{uk.ac.ox.cs.JRDFox.model.Individual.create(bot_str)});
201 OWLIndividual top_ind = factory.getOWLNamedIndividual(IRI.create(top_str)), bot_ind =
202 factory.getOWLNamedIndividual(IRI.create(bot_str));
203 Map<OWLAxiom, Integer> counter = new HashMap<OWLAxiom, Integer>();
204
205 Set<String> topAnswers = new HashSet<String>(), botAnswers = new HashSet<String>();
206 OWLIndividual sub, obj;
207 if(dGraph.getExits().size() > 1) {
208 for(Clique answerClique : dGraph.getExits())
209 topAnswers.add(((uk.ac.ox.cs.JRDFox.model.Individual) answerClique.getRepresentative()
210 .getAnswerTuple()
211 .getGroundTerm(0)).getIRI());
212 }
213 else topAnswerTuple = null;
214
215 if(dGraph.getEntrances().size() > 1) {
216 for(Clique answerClique : dGraph.getEntrances())
217 botAnswers.add(((uk.ac.ox.cs.JRDFox.model.Individual) answerClique.getRepresentative()
218 .getAnswerTuple()
219 .getGroundTerm(0)).getIRI());
220 }
221 else botAnswerTuple = null;
222
223 for(OWLAxiom axiom : ontology.getABoxAxioms(Imports.INCLUDED))
224 if(axiom instanceof OWLClassAssertionAxiom) {
225 OWLClassAssertionAxiom ca = (OWLClassAssertionAxiom) axiom;
226 sub = ca.getIndividual();
227 if(topAnswers.contains(sub.toStringID()))
228 topAxioms.add(factory.getOWLClassAssertionAxiom(ca.getClassExpression(), top_ind));
229 if(botAnswers.contains(sub.toStringID()))
230 inc(counter, factory.getOWLClassAssertionAxiom(ca.getClassExpression(), bot_ind));
231 }
232 else if(axiom instanceof OWLObjectPropertyAssertionAxiom) {
233 OWLObjectPropertyAssertionAxiom oa = (OWLObjectPropertyAssertionAxiom) axiom;
234 sub = oa.getSubject();
235 obj = oa.getObject();
236 if(topAnswers.contains(sub.toStringID()))
237 if(topAnswers.contains(obj.toStringID()))
238 topAxioms.add(factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), top_ind, top_ind));
239 else
240 topAxioms.add(factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), top_ind, obj));
241 else {
242 if(topAnswers.contains(obj.toStringID()))
243 topAxioms.add(factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), sub, top_ind));
244 }
245
246 if(botAnswers.contains(sub.toStringID()))
247 if(botAnswers.contains(obj.toStringID()))
248 inc(counter, factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), bot_ind, bot_ind));
249 else
250 inc(counter, factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), bot_ind, obj));
251 else {
252 if(botAnswers.contains(obj.toStringID()))
253 inc(counter, factory.getOWLObjectPropertyAssertionAxiom(oa.getProperty(), sub, bot_ind));
254 }
255
256 }
257 else if(axiom instanceof OWLDataPropertyAssertionAxiom) {
258 OWLDataPropertyAssertionAxiom da = (OWLDataPropertyAssertionAxiom) axiom;
259 sub = da.getSubject();
260 if(topAnswers.contains(sub.toStringID()))
261 topAxioms.add(factory.getOWLDataPropertyAssertionAxiom(da.getProperty(), top_ind, da.getObject()));
262
263 if(botAnswers.contains(sub.toStringID()))
264 inc(counter, factory.getOWLDataPropertyAssertionAxiom(da.getProperty(), bot_ind, da.getObject()));
265 }
266
267 int number = botAnswers.size();
268 for(Map.Entry<OWLAxiom, Integer> entry : counter.entrySet()) {
269 if(entry.getValue() == number)
270 botAxioms.add(entry.getKey());
271 }
272 }
273
274 private void inc(Map<OWLAxiom, Integer> counter, OWLAxiom newAxiom) {
275 if(isDisposed()) throw new DisposedException();
276
277 Integer number = counter.get(newAxiom);
278 if(number == null) counter.put(newAxiom, 1);
279 else counter.put(newAxiom, number + 1);
280 }
281
282}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java
new file mode 100644
index 0000000..034827e
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java
@@ -0,0 +1,422 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import org.semanticweb.HermiT.model.DLClause;
4import uk.ac.ox.cs.JRDFox.JRDFStoreException;
5import uk.ac.ox.cs.JRDFox.store.DataStore;
6import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
7import uk.ac.ox.cs.JRDFox.store.Parameters;
8import uk.ac.ox.cs.JRDFox.store.TripleStatus;
9import uk.ac.ox.cs.JRDFox.store.TupleIterator;
10import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
11import uk.ac.ox.cs.pagoda.query.AnswerTuples;
12import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
13import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
14import uk.ac.ox.cs.pagoda.rules.Program;
15import uk.ac.ox.cs.pagoda.util.*;
16import uk.ac.ox.cs.pagoda.util.Timer;
17import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
18
19import java.util.*;
20
21public class BasicQueryEngine extends RDFoxQueryEngine {
22
23 protected DataStore store;
24 protected Parameters parameters = new Parameters();
25 Set<DLClause> materialisedRules = new HashSet<DLClause>();
26 private UFS<String> equalityGroups = null;
27
28 public BasicQueryEngine(String name) {
29 super(name);
30 store = RDFoxQueryEngine.createDataStore();
31 parameters.m_allAnswersInRoot = true;
32 parameters.m_useBushy = true;
33 }
34
35 /**
36 * @return the overall number of triples in the store.
37 */
38 public long getStoreSize() throws JRDFStoreException {
39 return store.getTriplesCount();
40 }
41
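    // With a gap tracker, the lower program is materialised first and the upper
    // program is compiled through GapByStore4ID, so that the gap between the two
    // materialisations can be recorded before the rules are cleared; otherwise
    // the upper program is materialised directly.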
42 public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) {
43 if(isDisposed()) throw new DisposedException();
44 if(gap != null) {
45 materialise("lower program", dProgram.getLower().toString());
46 String program = dProgram.getUpper().toString();
47 try {
48 gap.compile(program);
49 gap.addBackTo();
50 getDataStore().clearRulesAndMakeFactsExplicit();
51 } catch(JRDFStoreException e) {
52 e.printStackTrace();
53 } finally {
54 gap.clear();
55 }
56 }
57 else
58 materialise("upper program", dProgram.getUpper().toString());
59 }
60
61 public int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap) {
62 if(isDisposed()) throw new DisposedException();
63 if(gap != null) {
64 materialise("lower program", dProgram.getLower().toString());
65 String program = dProgram.getUpper().toString();
66 try {
67 gap.compile(program);
68 gap.addBackTo();
69 getDataStore().clearRulesAndMakeFactsExplicit();
70 } catch(JRDFStoreException e) {
71 e.printStackTrace();
72 } finally {
73 gap.clear();
74 }
75 }
76 else
77 materialise("upper program", dProgram.getUpper().toString());
78
79 return 1;
80 }
81
82 @Override
83 public AnswerTuples evaluate(String queryText) {
84 if(isDisposed()) throw new DisposedException();
85 return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0]);
86 }
87
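    // Blank nodes ("_:") are rewritten into SPARQL variables before compilation,
    // so they behave as undistinguished variables in the query.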
88 @Override
89 public AnswerTuples evaluate(String queryText, String[] answerVars) {
90 if(isDisposed()) throw new DisposedException();
91 TupleIterator tupleIterator;
92 try {
93 tupleIterator = store.compileQuery(queryText.replace("_:", "?"), prefixes, parameters);
94 } catch(JRDFStoreException e) {
95 e.printStackTrace();
96 return null;
97 }
98 return new RDFoxAnswerTuples(answerVars, tupleIterator);
99 }
100
101 @Override
102 public DataStore getDataStore() {
103 if(isDisposed()) throw new DisposedException();
104 return store;
105 }
106
107 @Override
108 public void dispose() {
109 super.dispose();
110 store.dispose();
111 }
112
113 public void outputInstance4BinaryPredicate(String iri, String filename) {
114 if(isDisposed()) throw new DisposedException();
115
116 Utility.redirectCurrentOut(filename);
117 outputInstance4BinaryPredicate(iri);
118 Utility.closeCurrentOut();
119 }
120
121 public void outputInstance4BinaryPredicate(String iri) {
122 if(isDisposed()) throw new DisposedException();
123
124 outputAnswers("select ?x ?y where { ?x <" + iri + "> ?y . }");
125 }
126
127 public void outputInstanceNumbers(String filename) {
128 if(isDisposed()) throw new DisposedException();
129
130 TupleIterator predicateTuples = null;
131 TupleIterator instanceTuples;
132 Set<String> number = new HashSet<String>();
133 String predicate;
134 try {
135 predicateTuples =
136 getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Y }", prefixes, parameters);
137 for(long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) {
138 predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0));
139 instanceTuples = null;
140 try {
141 instanceTuples =
142 getDataStore().compileQuery("SELECT ?X WHERE { ?X <" + Namespace.RDF_TYPE + "> " + predicate + " }", prefixes, parameters);
143 long totalCount = 0;
144 for(long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) {
145 totalCount += instanceTuples.getMultiplicity();
146 }
147 number.add(predicate + " * " + totalCount);
148 } finally {
149 if(instanceTuples != null) instanceTuples.dispose();
150 }
151 }
152 } catch(JRDFStoreException e) {
153 e.printStackTrace();
154 } finally {
155 if(predicateTuples != null) predicateTuples.dispose();
156 predicateTuples = null;
157 }
158
159 try {
160 predicateTuples =
161 getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X ?Y ?Z }", prefixes, parameters);
162 for(long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) {
163 predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0));
164 instanceTuples = null;
165 try {
166 instanceTuples =
167 getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X " + predicate + " ?Z }", prefixes, parameters);
168 long totalCount = 0;
169 for(long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext())
170 totalCount += instanceTuples.getMultiplicity();
171 number.add(predicate + " * " + totalCount);
172 } finally {
173 if(instanceTuples != null) instanceTuples.dispose();
174 }
175 }
176
177 } catch(JRDFStoreException e) {
178 e.printStackTrace();
179 } finally {
180 if(predicateTuples != null) predicateTuples.dispose();
181 predicateTuples = null;
182 }
183
184 Utility.redirectCurrentOut(filename);
185 String[] ordered = number.toArray(new String[0]);
186 Arrays.sort(ordered, new DLPredicateComparator());
187 for(String line : ordered) System.out.println(line);
188 Utility.closeCurrentOut();
189
190 }
191
192 public TupleIterator internal_evaluateAgainstIDBs(String queryText) throws JRDFStoreException {
193 if(isDisposed()) throw new DisposedException();
194
195 TupleIterator iter =
196 store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB);
197// iter.open();
198 return iter;
199 }
200
201 public TupleIterator internal_evaluate(String queryText) throws JRDFStoreException {
202 if(isDisposed()) throw new DisposedException();
203
204 TupleIterator iter = store.compileQuery(queryText, prefixes, parameters);
205// iter.open();
206 return iter;
207 }
208
209 public void setExpandEquality(boolean flag) {
210 if(isDisposed()) throw new DisposedException();
211
212 parameters.m_expandEquality = flag;
213 }
214
215 public TupleIterator internal_evaluateNotExpanded(String queryText) throws JRDFStoreException {
216 if(isDisposed()) throw new DisposedException();
217
218 parameters.m_expandEquality = false;
219 TupleIterator iter = store.compileQuery(queryText, prefixes, parameters);
220// iter.open();
221 parameters.m_expandEquality = true;
222 return iter;
223 }
224
225 public TupleIterator internal_evaluate(String queryText, boolean incrementally) throws JRDFStoreException {
226 if(isDisposed()) throw new DisposedException();
227
228 return incrementally ? internal_evaluateAgainstIDBs(queryText) : internal_evaluate(queryText);
229 }
230
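    // Removes clauses that have already been materialised in this store and
    // returns the remaining ones as a datalog program, or null if none are left.
    // With toUpdate set, the surviving clauses are recorded as materialised.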
231 public String getUnusedRules(Collection<DLClause> clauses, boolean toUpdate) {
232 if(isDisposed()) throw new DisposedException();
233
234 DLClause clause;
235 for(Iterator<DLClause> iter = clauses.iterator(); iter.hasNext(); ) {
236 if(materialisedRules.contains(clause = iter.next()))
237 iter.remove();
238 else if(toUpdate) materialisedRules.add(clause);
239 }
240
241 if(clauses.isEmpty()) return null;
242
243 return Program.toString(clauses);
244 }
245
246 public void outputMaterialisedRules() {
247 if(isDisposed()) throw new DisposedException();
248
249 System.out.println(DLClauseHelper.toString(materialisedRules));
250 }
251
252 public void outputAnswers(String query) {
253 if(isDisposed()) throw new DisposedException();
254
255 TupleIterator iter = null;
256 try {
257 iter = internal_evaluate(query);
258 System.out.println(query);
259 int arity = iter.getArity();
260 for(long multi = iter.open(); multi != 0; multi = iter.getNext()) {
261 for(int i = 0; i < arity; ++i)
262 System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t");
263 System.out.println();
264 }
265 } catch(JRDFStoreException e) {
266 e.printStackTrace();
267 } finally {
268 if(iter != null) iter.dispose();
269 }
270 }
271
272 public void outputInstance4UnaryPredicate(String iri) {
273 if(isDisposed()) throw new DisposedException();
274
275 outputAnswers("select ?x where { ?x "
276 + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <"
277 + iri
278 + "> .}");
279 }
280
281 public void outputSubjects(String p, String o) {
282 if(isDisposed()) throw new DisposedException();
283
284 outputAnswers("select ?x where { ?x <" + p + "> <" + o + "> . }");
285 }
286
287 public void outputObjects(String s, String p) {
288 if(isDisposed()) throw new DisposedException();
289
290 outputAnswers("select ?x where { <" + s + "> <" + p + "> ?x . }");
291 }
292
293 public void outputIDBFacts() {
294 if(isDisposed()) throw new DisposedException();
295
296 TupleIterator iter = null;
297 try {
298 iter = internal_evaluateAgainstIDBs("select distinct ?x ?y ?z where { ?x ?y ?z }");
299 for(long multi = iter.open(); multi != 0; multi = iter.getNext()) {
300 for(int i = 0; i < 3; ++i)
301 System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t");
302 System.out.println();
303 }
304 } catch(JRDFStoreException e) {
305 // nothing to recover here; the iterator is disposed in the finally block
306 e.printStackTrace();
307 } finally {
308 if(iter != null) iter.dispose();
309 }
310
311 }
312
313 public void outputType4Individual(String iri) {
314 if(isDisposed()) throw new DisposedException();
315
316 outputAnswers("select ?z where { <" + iri + "> " + Namespace.RDF_TYPE_QUOTED + " ?z }");
317 }
318
319 public int getSameAsNumber() {
320 if(isDisposed()) throw new DisposedException();
321
322 TupleIterator iter = null;
323 int counter = 0;
324 try {
325 iter = internal_evaluate("select ?x ?y where {?x " + Namespace.EQUALITY_QUOTED + " ?y . }");
326 for(long multi = iter.open(); multi != 0; multi = iter.getNext())
327 if(iter.getResourceID(0) != iter.getResourceID(1))
328 ++counter;
329 } catch(JRDFStoreException e) {
330 e.printStackTrace();
331 } finally {
332 if(iter != null) iter.dispose();
333 }
334 return counter;
335 }
336
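    // Builds a union-find structure over all owl:sameAs pairs in the store,
    // grouping individuals that were equated during materialisation.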
337 public UFS<String> getEqualityGroups(boolean reuse) {
338 if(isDisposed()) throw new DisposedException();
339
340 if(reuse && equalityGroups != null) return equalityGroups;
341
342 equalityGroups = new UFS<String>();
343
344 TupleIterator answers = null;
345 try {
346 Timer t = new Timer();
347 answers = internal_evaluate("select ?x ?z where { ?x " + Namespace.EQUALITY_QUOTED + " ?z . }");
348 for(long multi = answers.open(); multi != 0; multi = answers.getNext()) {
349 if(answers.getResourceID(0) != answers.getResourceID(1))
350 equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm);
351 }
352 Utility.logInfo("@Time to group individuals by equality: " + t.duration());
353 } catch(JRDFStoreException e) {
354 e.printStackTrace();
355 } finally {
356 if(answers != null) answers.dispose();
357 }
358
359 return equalityGroups;
360 }
361
362 public void clearRulesAndIDBFacts(Collection<int[]> collection) {
363 if(isDisposed()) throw new DisposedException();
364
365// performDeletion(collection);
366 collection.clear();
367 try {
368 store.clearRulesAndMakeFactsExplicit();
369 } catch(JRDFStoreException e) {
370 e.printStackTrace();
371 }
372 }
373
374 protected void outputClassAssertions(String filename) {
375 TupleIterator allTuples = null;
376 boolean redirect = false;
377 try {
378 allTuples =
379 getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Z }", prefixes, parameters);
380 redirect = Utility.redirectCurrentOut(filename);
381 for(long multi = allTuples.open(); multi != 0; multi = allTuples.getNext())
382 System.out.println(RDFoxTripleManager.getQuotedTerm(allTuples.getResource(0)) + " " + RDFoxTripleManager
383 .getQuotedTerm(allTuples.getResource(1)));
384 } catch(JRDFStoreException e) {
385 e.printStackTrace();
386 } finally {
387 if(redirect) Utility.closeCurrentOut();
388 if(allTuples != null) allTuples.dispose();
389 }
390 }
391
392 @SuppressWarnings("unused")
393 private void performDeletion(Collection<int[]> collection) {
394 Utility.logInfo("Removing all rules, IDB facts and previously added triples...");
395 Timer timer = new Timer();
396 TupleIterator iter = null;
397 try {
398 UpdateType ut = UpdateType.ScheduleForDeletion;
399 for(int[] t : collection)
400 store.addTriplesByResourceIDs(t, ut);
401
402 try {
403 iter = internal_evaluateAgainstIDBs("select ?x ?y ?z where { ?x ?y ?z . }");
404 for(long multi = iter.open(); multi != 0; multi = iter.getNext()) {
405 int[] triple = new int[3];
406 for(int i = 0; i < 3; ++i)
407 triple[i] = iter.getResourceID(i);
408 store.addTriplesByResourceIDs(triple, ut);
409 }
410 } finally {
411 if(iter != null) iter.dispose();
412 iter = null;
413 }
414 store.applyReasoning(true);
415 } catch(JRDFStoreException e) {
416 e.printStackTrace();
417 }
418 Utility.logInfo("Time for deletion: " + timer.duration());
419 }
420
421
422}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java
new file mode 100644
index 0000000..05e399e
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java
@@ -0,0 +1,24 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import uk.ac.ox.cs.pagoda.multistage.Normalisation;
4import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
5
6import java.util.Comparator;
7
8public class DLPredicateComparator implements Comparator<String> {
9
10 @Override
11 public int compare(String arg0, String arg1) {
12 int ret = type(arg0) - type(arg1);
13 if (ret != 0) return ret;
14
15 return arg0.compareTo(arg1);
16 }
17
18 private int type(String p) {
19 if (p.contains(OverApproxExist.negativeSuffix)) return 1;
20 if (p.contains(Normalisation.auxiliaryConceptPrefix)) return 2;
21 else return 0;
22 }
23
24}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java
new file mode 100644
index 0000000..03d2b67
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java
@@ -0,0 +1,95 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import java.io.FileNotFoundException;
4import java.io.IOException;
5import java.util.LinkedList;
6import java.util.Map.Entry;
7
8import org.semanticweb.HermiT.model.DLClause;
9import org.semanticweb.karma2.exception.IllegalInputQueryException;
10import org.semanticweb.karma2.model.ConjunctiveQuery;
11import org.semanticweb.karma2.model.cqparser.ConjunctiveQueryParser;
12import uk.ac.ox.cs.pagoda.MyPrefixes;
13import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
14import uk.ac.ox.cs.pagoda.hermit.RuleHelper;
15import uk.ac.ox.cs.pagoda.util.Utility;
16
17public class KarmaQuery {
18
19 StringBuffer queryBuffer;
20
21 public KarmaQuery(String queryText) {
22 LinkedList<String> answerVariables = new LinkedList<String>();
23 DLClause clause = DLClauseHelper.getQuery(queryText, answerVariables);
24 String clauseText = RuleHelper.getText(clause);
25// clauseText = RuleHelper.abbreviateIRI(clauseText).replace(":-", "<-");
26 clauseText = clauseText.replace(":-", "<-");
27 queryBuffer = new StringBuffer();
28
29 clauseText = expandIRI4Arguments(clauseText);
30
31 for (Entry<String, String> entry : MyPrefixes.PAGOdAPrefixes.getPrefixIRIsByPrefixName().entrySet())
32 if (clauseText.contains(entry.getKey())) {
33 if (queryBuffer.length() > 0) queryBuffer.append(',').append(Utility.LINE_SEPARATOR);
34 queryBuffer.append("prefix ").append(entry.getKey()).append(" <").append(entry.getValue()).append(">");
35 }
36 if (queryBuffer.length() > 0) queryBuffer.append(Utility.LINE_SEPARATOR);
37
38 queryBuffer.append("p(");
39 boolean first = true;
40 for (String var: answerVariables) {
41 if (first) first = false;
42 else queryBuffer.append(",");
43
44 queryBuffer.append("?").append(var);
45 }
46 queryBuffer.append(")").append(clauseText.substring(0, clauseText.length() - 1));
47 }
48
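    // Expands abbreviated IRIs inside each atom's argument list to full form,
    // scanning the clause text parenthesis by parenthesis.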
49 private String expandIRI4Arguments(String clauseText) {
50 int leftIndex = clauseText.indexOf('('), rightIndex = clauseText.indexOf(')', leftIndex + 1);
51 String argsText, newArgsText;
52 while (leftIndex != -1) {
53 argsText = clauseText.substring(leftIndex + 1, rightIndex);
54 newArgsText = MyPrefixes.PAGOdAPrefixes.expandText(argsText);
55 clauseText = clauseText.replace(argsText, newArgsText);
56
57 rightIndex += newArgsText.length() - argsText.length();
58 leftIndex = clauseText.indexOf('(', rightIndex + 1);
59 rightIndex = clauseText.indexOf(')', leftIndex + 1);
60 }
61
62 return clauseText;
63 }
64
65 public ConjunctiveQuery getConjunctiveQuery() {
66 ConjunctiveQuery cq = null;
67 try {
68 cq = new ConjunctiveQueryParser(toString()).parse();
69 } catch (IOException | IllegalInputQueryException e) {
70 // FileNotFoundException is an IOException, so a single multi-catch covers all parser I/O and input errors
71 e.printStackTrace();
72 } catch (Exception e) {
79 Utility.logDebug("The query cannot be properly handled by KARMA.");
80 return null;
81 }
82 return cq;
83 }
84
85 @Override
86 public String toString() {
87 return queryBuffer.toString();
88 }
89
90 static String sample = "prefix P0: <http://swat.cse.lehigh.edu/onto/univ-bench.owl#>, " +
91 "prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>, " +
92 "prefix owl: <http://www.w3.org/2002/07/owl#>" +
93 "q(?0) <- owl:Thing(?0), P0:Person(?0)";
94
95}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java
new file mode 100644
index 0000000..98f0c35
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java
@@ -0,0 +1,109 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import org.semanticweb.karma2.MyKarma;
4import org.semanticweb.karma2.clausifier.OntologyProcesser;
5import org.semanticweb.karma2.exception.IllegalInputOntologyException;
6import org.semanticweb.karma2.model.ConjunctiveQuery;
7import org.semanticweb.owlapi.model.OWLOntology;
8import uk.ac.ox.cs.JRDFox.JRDFStoreException;
9import uk.ac.ox.cs.JRDFox.store.DataStore;
10import uk.ac.ox.cs.pagoda.query.AnswerTuple;
11import uk.ac.ox.cs.pagoda.query.AnswerTuples;
12import uk.ac.ox.cs.pagoda.query.AnswerTuplesImp;
13import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper;
14import uk.ac.ox.cs.pagoda.util.Utility;
15import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
16
17import java.io.File;
18import java.io.FileNotFoundException;
19import java.nio.file.Paths;
20import java.util.Set;
21
22public class KarmaQueryEngine extends RDFoxQueryEngine {
23
24 String karmaDataFile = null, karmaRuleFile = null;
25 private MyKarma reasoner = null;
26
27 public KarmaQueryEngine(String name) {
28 super(name);
29
30// int Base = 1 << 6;
31// int index = (new Random().nextInt() % Base + Base) % Base;
32// karmaDataFile = "karma_data" + index + ".ttl";
33// karmaRuleFile = "karma_rule" + index + ".dlog";
34 karmaDataFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_data.ttl").toString();
35 karmaRuleFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_rule.dlog").toString();
36
37 reasoner = new MyKarma();
38 }
39
40 public MyKarma getReasoner() {
41 if(isDisposed()) throw new DisposedException();
42 return reasoner;
43 }
44
45 public void processOntology(OWLOntology elhoOntology) {
46 if(isDisposed()) throw new DisposedException();
47 try {
48 OntologyProcesser.transformOntology(elhoOntology, new File(karmaDataFile), new File(karmaRuleFile));
49 } catch(IllegalInputOntologyException e) {
50 e.printStackTrace();
51 }
52 }
53
54 @Override
55 public void dispose() {
56 super.dispose();
57 reasoner.dispose();
58 }
59
60 @Override
61 public AnswerTuples evaluate(String queryText) {
62 if(isDisposed()) throw new DisposedException();
63 return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], null);
64 }
65
66 @Override
67 public AnswerTuples evaluate(String queryText, String[] answerVars) {
68 if(isDisposed()) throw new DisposedException();
69 return evaluate(queryText, answerVars, null);
70 }
71
72 public AnswerTuples evaluate(String queryText, AnswerTuples soundAnswerTuples) {
73 if(isDisposed()) throw new DisposedException();
74 return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], soundAnswerTuples);
75 }
76
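    // Full evaluation entry point: blank nodes are rewritten into variables
    // before parsing, any previously computed sound answers are passed through
    // to KARMA, together with a flag recording whether the original query was
    // free of blank nodes.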
77 public AnswerTuples evaluate(String queryText, String[] answerVars, AnswerTuples soundAnswerTuples) {
78 if(isDisposed()) throw new DisposedException();
79 KarmaQuery karmaQuery = new KarmaQuery(queryText.replace("_:", "?"));
80 reasoner.setConcurrence(false);
81 ConjunctiveQuery cq = karmaQuery.getConjunctiveQuery();
82 if(cq == null) return null;
83 Set<AnswerTuple> answers = reasoner.answerCQ(cq, soundAnswerTuples, !queryText.contains("_:"));
84 return new AnswerTuplesImp(answerVars, answers);
85 }
86
87 @Override
88 public DataStore getDataStore() {
89 if(isDisposed()) throw new DisposedException();
90 return reasoner.getStore();
91 }
92
93 public void initialiseKarma() {
94 if(isDisposed()) throw new DisposedException();
95 try {
96 reasoner.initializeData(new File(karmaDataFile));
97 reasoner.materialise(new File(karmaRuleFile));
98
99 File tmp;
100 if(karmaDataFile != null && ((tmp = new File(karmaDataFile)).exists())) tmp.delete();
101 if(karmaRuleFile != null && ((tmp = new File(karmaRuleFile)).exists())) tmp.delete();
102 } catch(FileNotFoundException e) {
103 e.printStackTrace();
104 } catch(JRDFStoreException e) {
105 e.printStackTrace();
106 }
107 }
108
109}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java
new file mode 100644
index 0000000..f823232
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java
@@ -0,0 +1,117 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import org.semanticweb.HermiT.model.Constant;
4import org.semanticweb.HermiT.model.Individual;
5import org.semanticweb.HermiT.model.Term;
6import uk.ac.ox.cs.JRDFox.JRDFStoreException;
7import uk.ac.ox.cs.JRDFox.model.GroundTerm;
8import uk.ac.ox.cs.JRDFox.store.TupleIterator;
9import uk.ac.ox.cs.pagoda.query.AnswerTuple;
10import uk.ac.ox.cs.pagoda.query.AnswerTuples;
11import uk.ac.ox.cs.pagoda.util.Utility;
12import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
13
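// Adapter exposing an RDFox TupleIterator through the AnswerTuples interface.
// The field `multi` holds the multiplicity reported by the iterator and is 0
// once the iterator is exhausted. A typical iteration pattern (a sketch, where
// `engine` and `process` stand for any query engine and answer consumer):
//
//     AnswerTuples ans = engine.evaluate(query);
//     try {
//         for (; ans.isValid(); ans.moveNext())
//             process(ans.getTuple());
//     } finally {
//         ans.dispose();
//     }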
14public class RDFoxAnswerTuples extends AnswerTuples {
15
16 long multi;
17 TupleIterator m_iter;
18 String[] m_answerVars;
19
20 public RDFoxAnswerTuples(String[] answerVars, TupleIterator iter) {
21 m_answerVars = answerVars;
22 m_iter = iter;
23 reset();
24 }
25
26 public static Term getHermitTerm(GroundTerm t) {
27 if(t instanceof uk.ac.ox.cs.JRDFox.model.Individual) {
28 uk.ac.ox.cs.JRDFox.model.Individual individual = (uk.ac.ox.cs.JRDFox.model.Individual) t;
29 return Individual.create(individual.getIRI());
30 }
31 else {
32 uk.ac.ox.cs.JRDFox.model.Literal literal = ((uk.ac.ox.cs.JRDFox.model.Literal) t);
33 return Constant.create(literal.getLexicalForm(), literal.getDatatype().getIRI());
34 }
35 }
36
37 @Override
38 public boolean isValid() {
39 if(isDisposed()) throw new DisposedException();
40
41 return multi != 0;
42 }
43
44 @Override
45 public int getArity() {
46 if(isDisposed()) throw new DisposedException();
47
48 try {
49 return m_iter.getArity();
50 } catch (JRDFStoreException e) {
51 e.printStackTrace();
52 return -1;
53 }
54 }
55
56 @Override
57 public void moveNext() {
58 if(isDisposed()) throw new DisposedException();
59
60 try {
61 multi = m_iter.getNext();
62 } catch (JRDFStoreException e) {
63 e.printStackTrace();
64 }
65 }
66
67 @Override
68 public void dispose() {
69 super.dispose();
70 m_iter.dispose();
71 }
72
73 @Override
74 public AnswerTuple getTuple() {
75 if(isDisposed()) throw new DisposedException();
76
77 return new AnswerTuple(m_iter, m_answerVars.length);
78 }
79
80 @Override
81 public void reset() {
82 if(isDisposed()) throw new DisposedException();
83
84 try {
85 multi = m_iter.open();
86 } catch (JRDFStoreException e) {
87 e.printStackTrace();
88 }
89 }
90
91 @Override
92 public boolean contains(AnswerTuple t) {
93 if(isDisposed()) throw new DisposedException();
94
95 Utility.logError("Unsupported operation in RDFoxAnswerTuples");
96 return false;
97 }
98
99 @Override
100 public void remove() {
101 if(isDisposed()) throw new DisposedException();
102
103 Utility.logError("Unsupported operation in RDFoxAnswerTuples");
104 }
105
106 @Override
107 public String[] getAnswerVariables() {
108 if(isDisposed()) throw new DisposedException();
109
110 return m_answerVars;
111 }
112
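    // Last-resort release of the native iterator; dispose() remains the proper
    // way to free it.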
113 protected void finalize() {
114 m_iter.dispose();
115 }
116
117}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
new file mode 100644
index 0000000..f19f253
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
@@ -0,0 +1,139 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import org.semanticweb.owlapi.model.OWLOntology;
4import org.semanticweb.owlapi.model.OWLOntologyCreationException;
5import org.semanticweb.owlapi.model.parameters.Imports;
6import uk.ac.ox.cs.JRDFox.JRDFStoreException;
7import uk.ac.ox.cs.JRDFox.Prefixes;
8import uk.ac.ox.cs.JRDFox.store.DataStore;
9import uk.ac.ox.cs.JRDFox.store.DataStore.StoreType;
10import uk.ac.ox.cs.pagoda.MyPrefixes;
11import uk.ac.ox.cs.pagoda.query.AnswerTuples;
12import uk.ac.ox.cs.pagoda.reasoner.QueryEngine;
13import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner;
14import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter;
15import uk.ac.ox.cs.pagoda.util.Timer;
16import uk.ac.ox.cs.pagoda.util.Utility;
17import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
18
19import java.io.File;
20import java.util.Collection;
21
22public abstract class RDFoxQueryEngine extends QueryEngine {
23
24 public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2;
25 protected String name;
26 protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes();
27
28 public RDFoxQueryEngine(String name) {
29 this.name = name;
30 }
31
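    // Creates a parallel RDFox store with a narrow head layout, configured to
    // use twice as many materialisation threads as there are available
    // processors.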
32 public static DataStore createDataStore() {
33 DataStore instance = null;
34 try {
35// instance = new DataStore("par-head-n");
36 instance = new DataStore(StoreType.NarrowParallelHead);
37 instance.setNumberOfThreads(matNoOfThreads);
38 instance.initialize();
39 } catch(JRDFStoreException e) {
40 e.printStackTrace();
41 }
42 return instance;
43 }
44
45 public String getName() {
46 if(isDisposed()) throw new DisposedException();
47 return name;
48 }
49
50 public abstract DataStore getDataStore();
51
52 public void importRDFData(String fileName, String importedFile) {
53 if(isDisposed()) throw new DisposedException();
54 if(importedFile == null || importedFile.isEmpty()) return;
55 Timer t = new Timer();
56 DataStore store = getDataStore();
57 try {
58 long oldTripleCount = store.getTriplesCount(), tripleCount;
59 for(String file : importedFile.split(QueryReasoner.ImportDataFileSeparator)) {
60 store.importTurtleFile(new File(file), prefixes);
61 }
62 tripleCount = store.getTriplesCount();
63 Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)");
64 store.clearRulesAndMakeFactsExplicit();
65 } catch(JRDFStoreException e) {
66 e.printStackTrace();
67 }
68 Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds.");
69 }
70
71 public void importDataFromABoxOf(OWLOntology ontology) {
72 if(isDisposed()) throw new DisposedException();
73 DataStore store = getDataStore();
74 try {
75 long prevTriplesCount = store.getTriplesCount();
76 store.importOntology(ontology.getOWLOntologyManager().createOntology(ontology.getABoxAxioms(Imports.INCLUDED)));
77 long loadedTriples = store.getTriplesCount() - prevTriplesCount;
78 Utility.logDebug(name + ": loaded " + loadedTriples + " triples from " + ontology.getABoxAxioms(Imports.INCLUDED)
79 .size() + " ABox axioms");
80 } catch(JRDFStoreException | OWLOntologyCreationException e) {
81 e.printStackTrace();
82 System.exit(1);
83 }
84
85 }
86
87 public void materialise(String programName, String programText) {
88 if(isDisposed()) throw new DisposedException();
89 if(programText == null) return;
90 Timer t = new Timer();
91 DataStore store = getDataStore();
92 try {
93 long oldTripleCount = store.getTriplesCount(), tripleCount;
94// store.addRules(new String[] {programText});
95 store.importRules(programText);
96 store.applyReasoning();
97 tripleCount = store.getTriplesCount();
98 Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)");
99 store.clearRulesAndMakeFactsExplicit();
100 } catch(JRDFStoreException e) {
101 e.printStackTrace();
102 }
103 Utility.logDebug(name + " store finished the materialisation of " + programName + " in " + t.duration() + " seconds.");
104 }
105
106 @Override
107 public void evaluate(Collection<String> queryTexts, String answerFile) {
108 if(isDisposed()) throw new DisposedException();
109 if(queryTexts == null)
110 return;
111
112 int queryID = 0;
113 AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile);
114 AnswerTuples answerTuples;
115 Timer t = new Timer();
116 try {
117 for(String query : queryTexts) {
118 t.reset();
119 answerTuples = null;
120 try {
121 answerTuples = evaluate(query);
122 Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration());
123 answerWriter.write(answerTuples.getAnswerVariables(), answerTuples);
124 } finally {
125 if(answerTuples != null) answerTuples.dispose();
126 }
127 }
128 } finally {
129 answerWriter.close();
130 }
131
132 Utility.logDebug("done computing query answers by RDFox.");
133 }
134
135 @Override
136 public void dispose() {
137 super.dispose();
138 }
139}
diff --git a/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
new file mode 100644
index 0000000..62885be
--- /dev/null
+++ b/src/main/java/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
@@ -0,0 +1,269 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import net.sf.ehcache.Cache;
4import net.sf.ehcache.CacheManager;
5import net.sf.ehcache.Element;
6import org.semanticweb.HermiT.model.*;
7import uk.ac.ox.cs.JRDFox.JRDFStoreException;
8import uk.ac.ox.cs.JRDFox.model.Datatype;
9import uk.ac.ox.cs.JRDFox.model.GroundTerm;
10import uk.ac.ox.cs.JRDFox.store.DataStore;
11import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
12import uk.ac.ox.cs.JRDFox.store.Dictionary;
13import uk.ac.ox.cs.JRDFox.store.Resource;
14import uk.ac.ox.cs.pagoda.owl.OWLHelper;
15import uk.ac.ox.cs.pagoda.util.Namespace;
16
17import java.util.Collection;
18import java.util.HashMap;
19import java.util.Map;
20
21public class RDFoxTripleManager {
22
23 private final Cache termsCache;
24 private static final int TERMS_CACHE_SIZE = 10000;
25 private static final int CACHE_TTL_DEFAULT = 0;
26 private static final int CACHE_TTI_DEFAULT = 0;
27 private static final boolean CACHE_ETERNAL = true;
28 private static final boolean CACHE_USE_DISK = false;
29
30 UpdateType m_incrementally;
31// boolean m_incrementally;
32
33 DataStore m_store;
34 Dictionary m_dict;
35// Set<Atom> triplesByTerm = new HashSet<Atom>();
36
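    // One Ehcache instance per data store (keyed by the store's hash code)
    // caches term-to-resource-ID resolutions; rdf:type, owl:sameAs and
    // owl:differentFrom are resolved eagerly into resourceID[0..2].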
37 public RDFoxTripleManager(DataStore store, boolean incrementally) {
38 m_store = store;
39// m_incrementally = incrementally;
40
41 CacheManager cacheManager = CacheManager.getInstance();
42 String cacheName = "RDFoxTripleManager_" + store.hashCode();
43 if(! cacheManager.cacheExists(cacheName)) {
44 termsCache = new Cache(cacheName,
45 TERMS_CACHE_SIZE, CACHE_USE_DISK, CACHE_ETERNAL,
46 CACHE_TTL_DEFAULT, CACHE_TTI_DEFAULT);
47 cacheManager.addCache(termsCache);
48 }
49 else
50 termsCache = cacheManager.getCache(cacheName);
51
52 if (incrementally)
53 m_incrementally = UpdateType.ScheduleForAddition;
54 else
55 m_incrementally = UpdateType.Add;
56
57 try {
58 m_dict = store.getDictionary();
59 resourceID = m_dict.resolveResources(
60 new String[] {Namespace.RDF_TYPE, Namespace.EQUALITY, Namespace.INEQUALITY},
61 new int[] {Datatype.IRI_REFERENCE.value(), Datatype.IRI_REFERENCE.value(), Datatype.IRI_REFERENCE.value()}
62 );
63 } catch (JRDFStoreException e) {
64 e.printStackTrace();
65 }
66 }
67
68 public boolean isRdfTypeID(int id) {
69 return id == resourceID[0];
70 }
71
72 public void addTripleByID(int[] tuple) {
73// System.out.println(getRawTerm(tuple[0]) + " " + getRawTerm(tuple[1]) + " " + getRawTerm(tuple[2]) + " .");
74 try {
75// Resource[] rsc = new Resource[3];
76// m_dict.getResources(tuple, 0, 3, rsc);
77//
78// GroundTerm[] terms = new GroundTerm[3];
79// for (int i = 0; i < 3; ++i)
80// terms[i] = uk.ac.ox.cs.JRDFox.model.Individual.create(rsc[i].m_lexicalForm);
81// m_store.addTriples(terms, m_incrementally);
82
83 m_store.addTriplesByResourceIDs(tuple, m_incrementally);
84 } catch (JRDFStoreException e) {
85 e.printStackTrace();
86 }
87 }
88
89 public void addTripleByTerm(Atom atom) {
90 try {
91 m_store.addTriples(getRDFoxTriple(atom), m_incrementally);
92 } catch (JRDFStoreException e) {
93 e.printStackTrace();
94 }
95 }
96
97 public void removeTripleByTermIncrementally(Atom atom) {
98 try {
99 m_store.addTriples(getRDFoxTriple(atom), UpdateType.ScheduleForDeletion);
100 } catch (JRDFStoreException e) {
101 e.printStackTrace();
102 }
103 }
104
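    // Translates a HermiT atom into an RDFox triple: unary atoms become rdf:type
    // triples, (in)equality atoms become owl:sameAs / owl:differentFrom triples,
    // and binary atoms become plain property triples.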
105 public static GroundTerm[] getRDFoxTriple(Atom instance) {
106 if (instance.getArity() == 1)
107 return new GroundTerm[] {
108 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
109 uk.ac.ox.cs.JRDFox.model.Individual.RDF_TYPE,
110 uk.ac.ox.cs.JRDFox.model.Individual.create(((AtomicConcept) instance.getDLPredicate()).getIRI()) };
111 else if (instance.getDLPredicate() instanceof Equality || instance.getDLPredicate() instanceof AnnotatedEquality)
112 return new GroundTerm[] {
113 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
114 uk.ac.ox.cs.JRDFox.model.Individual.SAME_AS,
115 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) };
116 else if (instance.getDLPredicate() instanceof Inequality)
117 return new GroundTerm[] {
118 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
119 uk.ac.ox.cs.JRDFox.model.Individual.DIFFERENT_FROM,
120 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) };
121 else
122 return new GroundTerm[] {
123 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
124 uk.ac.ox.cs.JRDFox.model.Individual.create(((AtomicRole) instance.getDLPredicate()).getIRI()),
125 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) };
126 }
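
    /*
     * For example (hypothetical IRIs):
     *   C(a)     ->  <a> rdf:type <C>
     *   a = b    ->  <a> owl:sameAs <b>
     *   a != b   ->  <a> owl:differentFrom <b>
     *   R(a, b)  ->  <a> <R> <b>
     */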

    // IDs of rdf:type, owl:sameAs and owl:differentFrom, in that order,
    // resolved once in the constructor.
    int[] resourceID;

    /**
     * Translates an atom into a triple of resource IDs, mirroring the cases of
     * getRDFoxTriple; variables are looked up in the given assignment.
     */
    public int[] getInstance(Atom atom, Map<Variable, Integer> assignment) {
        DLPredicate p = atom.getDLPredicate();
        if (p instanceof Equality || p instanceof AnnotatedEquality)
            return new int[] {
                    getResourceID(atom.getArgument(0), assignment),
                    resourceID[1],
                    getResourceID(atom.getArgument(1), assignment)
            };
        else if (p instanceof Inequality)
            return new int[] {
                    getResourceID(atom.getArgument(0), assignment),
                    resourceID[2],
                    getResourceID(atom.getArgument(1), assignment)
            };
        else if (atom.getArity() == 1)
            return new int[] {
                    getResourceID(atom.getArgument(0), assignment),
                    resourceID[0],
                    getResourceID(p)
            };
        else
            return new int[] {
                    getResourceID(atom.getArgument(0), assignment),
                    getResourceID(p),
                    getResourceID(atom.getArgument(1), assignment)
            };
    }

    /** Returns the quoted lexical form of the resource with the given ID. */
    public String getRawTerm(int id) {
        Resource[] res = new Resource[1];
        try {
            m_dict.getResources(new int[] {id}, 0, 1, res);
        } catch (JRDFStoreException e) {
            e.printStackTrace();
        }
        return getQuotedTerm(res[0]);
    }

    // Cache of predicate-IRI -> resource-ID resolutions.
    Map<String, Integer> predicateCache = new HashMap<String, Integer>();

    /** Resolves (and caches) the resource ID of a concept or role predicate. */
    public int getResourceID(DLPredicate p) {
        Integer id;
        String name = p instanceof AtomicConcept ? ((AtomicConcept) p).getIRI() : ((AtomicRole) p).getIRI();
        if ((id = predicateCache.get(name)) != null) return id;
        try {
            predicateCache.put(name, id = resolveResource(name, Datatype.IRI_REFERENCE.value()));
        } catch (JRDFStoreException e) {
            // Rethrow unchecked: returning here would unbox a null Integer.
            throw new RuntimeException(e);
        }
        return id;
    }

    /** Resolves the resource ID of an IRI. */
    public int getResourceID(String name) {
        try {
            return resolveResource(name, Datatype.IRI_REFERENCE.value());
        } catch (JRDFStoreException e) {
            // Rethrow unchecked: there is no sensible fallback ID.
            throw new RuntimeException(e);
        }
    }

    private int resolveResource(String name, int type) throws JRDFStoreException {
        String[] lexicalForms = new String[] {name};
        int[] types = new int[] {type};
        return m_dict.resolveResources(lexicalForms, types)[0];
    }

    /**
     * Resolves the resource ID of a term: variables are looked up in the
     * assignment; individuals and constants are resolved through the
     * dictionary and memoised in termsCache.
     */
    private int getResourceID(Term arg, Map<Variable, Integer> assignment) {
        if (arg instanceof Variable) return assignment.get(arg);
        if (termsCache.isKeyInCache(arg))
            return (int) termsCache.get(arg).getObjectValue();

        int id = -1;
        try {
            if (arg instanceof Individual)
                termsCache.put(new Element(arg, id = resolveResource(((Individual) arg).getIRI(), Datatype.IRI_REFERENCE.value())));
            else if (arg instanceof Constant)
                termsCache.put(new Element(arg, id = resolveResource(((Constant) arg).getLexicalForm(), getDatatypeID(((Constant) arg).getDatatypeURI()))));
        } catch (JRDFStoreException e) {
            e.printStackTrace();
            System.exit(1);
        }

        return id;
    }
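
    /*
     * Hypothetical illustration of the memoisation above: resolving the same
     * individual twice should hit termsCache on the second call.
     *
     *   Term a = Individual.create("http://example.org/a");      // hypothetical IRI
     *   int first  = getResourceID(a, Collections.emptyMap());   // dictionary lookup
     *   int second = getResourceID(a, Collections.emptyMap());   // served from cache
     *   assert first == second;
     */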

    /** Maps an XSD/RDF datatype URI to its JRDFox Datatype ID; -1 if unsupported. */
    private static int getDatatypeID(String uri) {
        switch (uri) {
            case "http://www.w3.org/2001/XMLSchema#string": return Datatype.XSD_STRING.value();
            case "http://www.w3.org/1999/02/22-rdf-syntax-ns#PlainLiteral": return Datatype.RDF_PLAIN_LITERAL.value();
            case "http://www.w3.org/2001/XMLSchema#integer": return Datatype.XSD_INTEGER.value();
            case "http://www.w3.org/2001/XMLSchema#float": return Datatype.XSD_FLOAT.value();
            case "http://www.w3.org/2001/XMLSchema#double": return Datatype.XSD_DOUBLE.value();
            case "http://www.w3.org/2001/XMLSchema#boolean": return Datatype.XSD_BOOLEAN.value();
            case "http://www.w3.org/2001/XMLSchema#dateTime": return Datatype.XSD_DATE_TIME.value();
            case "http://www.w3.org/2001/XMLSchema#time": return Datatype.XSD_TIME.value();
            case "http://www.w3.org/2001/XMLSchema#date": return Datatype.XSD_DATE.value();
            case "http://www.w3.org/2001/XMLSchema#gYearMonth": return Datatype.XSD_G_YEAR_MONTH.value();
            case "http://www.w3.org/2001/XMLSchema#gYear": return Datatype.XSD_G_YEAR.value();
            case "http://www.w3.org/2001/XMLSchema#gMonthDay": return Datatype.XSD_G_MONTH_DAY.value();
            case "http://www.w3.org/2001/XMLSchema#gDay": return Datatype.XSD_G_DAY.value();
            case "http://www.w3.org/2001/XMLSchema#gMonth": return Datatype.XSD_G_MONTH.value();
            case "http://www.w3.org/2001/XMLSchema#duration": return Datatype.XSD_DURATION.value();
            default: return -1;
        }
    }

    /** Bulk-resolves the resource IDs of a collection of RDFox individuals. */
    public int[] getResourceIDs(Collection<uk.ac.ox.cs.JRDFox.model.Individual> individuals) {
        String[] str = new String[individuals.size()];
        int[] types = new int[individuals.size()];
        int index = 0;
        for (uk.ac.ox.cs.JRDFox.model.Individual individual : individuals) {
            types[index] = Datatype.IRI_REFERENCE.value();
            str[index++] = individual.getIRI();
        }

        try {
            return m_dict.resolveResources(str, types);
        } catch (JRDFStoreException e) {
            e.printStackTrace();
            return null;
        }
    }

    /** Serialises a resource in Turtle style: IRIs in angle brackets, literals quoted. */
    public static String getQuotedTerm(Resource r) {
        if (r.m_datatype.equals(Datatype.IRI_REFERENCE))
            return OWLHelper.addAngles(r.m_lexicalForm);
        if (r.m_datatype.equals(Datatype.XSD_STRING) || r.m_datatype.equals(Datatype.RDF_PLAIN_LITERAL))
            return "\"" + r.m_lexicalForm + "\"";
        return "\"" + r.m_lexicalForm + "\"^^<" + r.m_datatype.getIRI() + ">";
    }
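
    /*
     * Sample outputs (values hypothetical):
     *   IRI reference        ->  <http://example.org/a>
     *   xsd:string / plain   ->  "abc"
     *   other datatypes      ->  "42"^^<http://www.w3.org/2001/XMLSchema#integer>
     */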
}