author     yzhou <yujiao.zhou@gmail.com>  2015-04-21 10:34:27 +0100
committer  yzhou <yujiao.zhou@gmail.com>  2015-04-21 10:34:27 +0100
commit     9ce65c5a963b03ee97fe9cb6c5aa65a3c04a80a8 (patch)
tree       47511c0fb89dccff0db4b5990522e04f294d795b /src/uk/ac/ox/cs/pagoda/reasoner/light
parent     b1ac207612ee8b045244253fb94b866104bc34f2 (diff)
download   ACQuA-9ce65c5a963b03ee97fe9cb6c5aa65a3c04a80a8.tar.gz
           ACQuA-9ce65c5a963b03ee97fe9cb6c5aa65a3c04a80a8.zip
initial version
Diffstat (limited to 'src/uk/ac/ox/cs/pagoda/reasoner/light')
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java       366
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java   24
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java              95
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java        98
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java      100
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java       110
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java     249
7 files changed, 1042 insertions, 0 deletions
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java
new file mode 100644
index 0000000..3207ff1
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java
@@ -0,0 +1,366 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import java.util.Arrays;
4import java.util.Collection;
5import java.util.HashSet;
6import java.util.Iterator;
7import java.util.Set;
8
9import org.semanticweb.HermiT.model.DLClause;
10
11import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
12import uk.ac.ox.cs.pagoda.query.AnswerTuples;
13import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
14import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
15import uk.ac.ox.cs.pagoda.rules.Program;
16import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper;
17import uk.ac.ox.cs.pagoda.util.Namespace;
18import uk.ac.ox.cs.pagoda.util.Timer;
19import uk.ac.ox.cs.pagoda.util.UFS;
20import uk.ac.ox.cs.pagoda.util.Utility;
21import uk.ac.ox.cs.JRDFox.JRDFStoreException;
22import uk.ac.ox.cs.JRDFox.store.DataStore;
23import uk.ac.ox.cs.JRDFox.store.Parameters;
24import uk.ac.ox.cs.JRDFox.store.TripleStatus;
25import uk.ac.ox.cs.JRDFox.store.TupleIterator;
26import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
27
28public class BasicQueryEngine extends RDFoxQueryEngine {
29
30 protected DataStore store;
31 protected Parameters parameters = new Parameters();
32
33 public BasicQueryEngine(String name) {
34 super(name);
35 store = RDFoxQueryEngine.createDataStore();
36 parameters.m_allAnswersInRoot = true;
37 parameters.m_useBushy = true;
38 }
39
40 public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) {
41 if (gap != null) {
42 materialise("lower program", dProgram.getLower().toString());
43 String program = dProgram.getUpper().toString();
44 try {
45 gap.compile(program);
46 gap.addBackTo();
47 getDataStore().clearRulesAndMakeFactsExplicit();
48 } catch (JRDFStoreException e) {
49 e.printStackTrace();
50 gap.clear();
51 } finally {
52 }
53 }
54 else
55 materialise("upper program", dProgram.getUpper().toString());
56 }
57
58 public int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap) {
59 if (gap != null) {
60 materialise("lower program", dProgram.getLower().toString());
61 String program = dProgram.getUpper().toString();
62 try {
63 gap.compile(program);
64 gap.addBackTo();
65 getDataStore().clearRulesAndMakeFactsExplicit();
66 } catch (JRDFStoreException e) {
67 e.printStackTrace();
68 } finally {
69 gap.clear();
70 }
71 }
72 else
73 materialise("upper program", dProgram.getUpper().toString());
74
75 return 1;
76 }
77
78 @Override
79 public AnswerTuples evaluate(String queryText) {
80 return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0]);
81 }
82
83 @Override
84 public AnswerTuples evaluate(String queryText, String[] answerVars) {
85 TupleIterator tupleIterator;
86 try {
87 tupleIterator = store.compileQuery(queryText.replace("_:", "?"), prefixes, parameters);
88 } catch (JRDFStoreException e) {
89 e.printStackTrace();
90 return null;
91 }
92 return new RDFoxAnswerTuples(answerVars, tupleIterator);
93 }
94
95 @Override
96 public DataStore getDataStore() {
97 return store;
98 }
99
100 @Override
101 public void dispose() {
102 store.dispose();
103 }
104
105 protected void outputClassAssertions(String filename) {
106 TupleIterator allTuples = null;
107 boolean redirect = false;
108 try {
109 allTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Z }", prefixes, parameters);
110 redirect = Utility.redirectCurrentOut(filename);
111 for (long multi = allTuples.open(); multi != 0; multi = allTuples.getNext())
112 System.out.println(RDFoxTripleManager.getQuotedTerm(allTuples.getResource(0)) + " " + RDFoxTripleManager.getQuotedTerm(allTuples.getResource(1)));
113 } catch (JRDFStoreException e) {
114 e.printStackTrace();
115 } finally {
116 if (redirect) Utility.closeCurrentOut();
117 if (allTuples != null) allTuples.dispose();
118 }
119 }
120
121 public void outputInstance4BinaryPredicate(String iri, String filename) {
122 Utility.redirectCurrentOut(filename);
123 outputInstance4BinaryPredicate(iri);
124 Utility.closeCurrentOut();
125 }
126
127 public void outputInstance4BinaryPredicate(String iri) {
128 outputAnswers("select ?x ?y where { ?x <" + iri + "> ?y . }");
129 }
130
131 public void outputInstanceNumbers(String filename) {
132 TupleIterator predicateTuples = null;
133 TupleIterator instanceTuples;
134 Set<String> number = new HashSet<String>();
135 String predicate;
136 try {
137 predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Y }", prefixes, parameters);
138 for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) {
139 predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0));
140 instanceTuples = null;
141 try {
142 instanceTuples = getDataStore().compileQuery("SELECT ?X WHERE { ?X <" + Namespace.RDF_TYPE + "> " + predicate + " }", prefixes, parameters);
143 long totalCount = 0;
144 for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) {
145 totalCount += instanceTuples.getMultiplicity();
146 }
147 number.add(predicate + " * " + totalCount);
148 } finally {
149 if (instanceTuples != null) instanceTuples.dispose();
150 }
151 }
152
153 predicateTuples.dispose();
154
155 predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X ?Y ?Z }", prefixes, parameters);
156 for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) {
157 predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0));
158 instanceTuples = null;
159 try {
160 instanceTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X " + predicate + " ?Z }", prefixes, parameters);
161 ;
162 long totalCount = 0;
163 for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext())
164 totalCount += instanceTuples.getMultiplicity();
165 number.add(predicate + " * " + totalCount);
166 } finally {
167 if (instanceTuples != null) instanceTuples.dispose();
168 }
169 }
170
171 } catch (JRDFStoreException e) {
172 e.printStackTrace();
173 } finally {
174 if (predicateTuples != null) predicateTuples.dispose();
175 }
176
177 Utility.redirectCurrentOut(filename);
178 String[] ordered = number.toArray(new String[0]);
179 Arrays.sort(ordered, new DLPredicateComparator());
180 for (String line: ordered) System.out.println(line);
181 Utility.closeCurrentOut();
182
183 }
184
185 public TupleIterator internal_evaluateAgainstIDBs(String queryText) throws JRDFStoreException {
186 TupleIterator iter = store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB);
187 iter.open();
188 return iter;
189 }
190
191 public TupleIterator internal_evaluate(String queryText) throws JRDFStoreException {
192 TupleIterator iter = store.compileQuery(queryText, prefixes, parameters);
193 iter.open();
194 return iter;
195 }
196
197 public void setExpandEquality(boolean flag) {
198 parameters.m_expandEquality = flag;
199 }
200
201 public TupleIterator internal_evaluateNotExpanded(String queryText) throws JRDFStoreException {
202 parameters.m_expandEquality = false;
203 TupleIterator iter = store.compileQuery(queryText, prefixes, parameters);
204 iter.open();
205 parameters.m_expandEquality = true;
206 return iter;
207 }
208
209
210 public TupleIterator internal_evaluate(String queryText, boolean incrementally) throws JRDFStoreException {
211 return incrementally ? internal_evaluateAgainstIDBs(queryText) : internal_evaluate(queryText);
212 }
213
214 Set<DLClause> materialisedRules = new HashSet<DLClause>();
215
216 public String getUnusedRules(Collection<DLClause> clauses, boolean toUpdate) {
217 DLClause clause;
218 for (Iterator<DLClause> iter = clauses.iterator(); iter.hasNext(); ) {
219 if (materialisedRules.contains(clause = iter.next()))
220 iter.remove();
221 else if (toUpdate) materialisedRules.add(clause);
222 }
223
224 if (clauses.isEmpty()) return null;
225
226 return Program.toString(clauses);
227 }
228
229 public void outputMaterialisedRules() {
230 System.out.println(DLClauseHelper.toString(materialisedRules));
231 }
232
233 public void outputAnswers(String query) {
234 TupleIterator iter = null;
235 try {
236 iter = internal_evaluate(query);
237 System.out.println(query);
238 int arity = iter.getArity();
239 for (long multi = iter.open(); multi != 0; multi = iter.getNext()) {
240 for (int i = 0; i < arity; ++i)
241 System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t");
242 System.out.println();
243 }
244 } catch (JRDFStoreException e) {
245 e.printStackTrace();
246 } finally {
247 if (iter != null) iter.dispose();
248 }
249 }
250
251 public void outputInstance4UnaryPredicate(String iri) {
252 outputAnswers("select ?x where { ?x "
253 + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <"
254 + iri
255 + "> .}");
256 }
257
258 public void outputSubjects(String p, String o) {
259 outputAnswers("select ?x where { ?x <" + p + "> <" + o + "> . }");
260 }
261
262 public void outputObjects(String s, String p) {
263 outputAnswers("select ?x where { <" + s + "> <" + p + "> ?x . }");
264 }
265
266 public void outputIDBFacts() {
267 TupleIterator iter = null;
268 try {
269 iter = internal_evaluateAgainstIDBs("select distinct ?x ?y ?z where { ?x ?y ?z }");
270 for (long multi = iter.open(); multi != 0; multi = iter.getNext()) {
271 for (int i = 0; i < 3; ++i)
272 System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t");
273 System.out.println();
274 }
275 } catch (JRDFStoreException e) {
276 // TODO Auto-generated catch block
277 e.printStackTrace();
278 } finally {
279 if (iter != null) iter.dispose();
280 }
281
282 }
283
284 public void outputType4Individual(String iri) {
285 outputAnswers("select ?z where { <" + iri + "> " + Namespace.RDF_TYPE_QUOTED + " ?z }");
286 }
287
288 public int getSameAsNumber() {
289 TupleIterator iter = null;
290 int counter = 0;
291 try {
292 iter = internal_evaluate("select ?x ?y where {?x " + Namespace.EQUALITY_QUOTED + " ?y . }");
293 for (long multi = iter.open(); multi != 0; multi = iter.getNext())
294 if (iter.getResourceID(0) != iter.getResourceID(1))
295 ++counter;
296 } catch (JRDFStoreException e) {
297 e.printStackTrace();
298 } finally {
299 if (iter != null) iter.dispose();
300 }
301 return counter;
302 }
303
304 private UFS<String> equalityGroups = null;
305
306 public UFS<String> getEqualityGroups() {
307 if (equalityGroups != null) return equalityGroups;
308
309 equalityGroups = new UFS<String>();
310
311 TupleIterator answers = null;
312 try {
313 Timer t = new Timer();
314 answers = internal_evaluate("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + " ?z . }");
315 for (long multi = answers.open(); multi != 0; multi = answers.getNext()) {
316 if (answers.getResourceID(0) != answers.getResourceID(1))
317 equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm);
318 }
319 Utility.logInfo("@Time to group individuals by equality: " + t.duration());
320 } catch (JRDFStoreException e) {
321 e.printStackTrace();
322 } finally {
323 if (answers != null) answers.dispose();
324 }
325
326 return equalityGroups;
327 }
328
329 public void clearRulesAndIDBFacts(Collection<int[]> collection) {
330// performDeletion(collection);
331 collection.clear();
332 try {
333 store.clearRulesAndMakeFactsExplicit();
334 } catch (JRDFStoreException e) {
335 e.printStackTrace();
336 }
337 }
338
339 @SuppressWarnings("unused")
340 private void performDeletion(Collection<int[]> collection) {
341 Utility.logInfo("Remove all rules, IDB facts and added stuff...");
342 Timer timer = new Timer();
343 TupleIterator iter = null;
344 try {
345 UpdateType ut = UpdateType.ScheduleForDeletion;
346 for (int[] t: collection)
347 store.addTriplesByResourceIDs(t, ut);
348
349 iter = internal_evaluateAgainstIDBs("select ?x ?y ?z where { ?x ?y ?z . }");
350 for (long multi = iter.open(); multi != 0; multi = iter.getNext()) {
351 int[] triple = new int[3];
352 for (int i = 0; i < 3; ++i)
353 triple[i] = iter.getResourceID(i);
354 store.addTriplesByResourceIDs(triple, ut);
355 }
356 store.applyReasoning(true);
357 } catch (JRDFStoreException e) {
358 e.printStackTrace();
359 } finally {
360 if (iter != null) iter.dispose();
361 }
362 Utility.logInfo("Time for deletion: " + timer.duration());
363 }
364
365
366}
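For orientation, a minimal usage sketch of the BasicQueryEngine added above (illustrative only, not part of the patch). The engine name, data path and IRIs are hypothetical; the query follows the same pattern as outputInstance4UnaryPredicate, and evaluate returns null if query compilation fails.

package uk.ac.ox.cs.pagoda.reasoner.light;

import uk.ac.ox.cs.pagoda.query.AnswerTuples;

public class BasicQueryEngineSketch {
    public static void main(String[] args) {
        // The name is only a label used in log messages; the data path is made up.
        BasicQueryEngine engine = new BasicQueryEngine("example");
        engine.importRDFData("abox", "/tmp/data.ttl");
        AnswerTuples answers = null;
        try {
            answers = engine.evaluate("select ?x where { ?x "
                    + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> "
                    + "<http://example.org/Person> . }");
            // AnswerTuples is a cursor: already open, advanced with moveNext().
            for (; answers != null && answers.isValid(); answers.moveNext())
                System.out.println(answers.getTuple());
        } finally {
            if (answers != null) answers.dispose();
            engine.dispose();
        }
    }
}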
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java
new file mode 100644
index 0000000..c22902c
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java
@@ -0,0 +1,24 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import java.util.Comparator;
4
5import uk.ac.ox.cs.pagoda.multistage.Normalisation;
6import uk.ac.ox.cs.pagoda.rules.OverApproxExist;
7
8public class DLPredicateComparator implements Comparator<String> {
9
10 @Override
11 public int compare(String arg0, String arg1) {
12 int ret = type(arg0) - type(arg1);
13 if (ret != 0) return ret;
14
15 return arg0.compareTo(arg1);
16 }
17
18 private int type(String p) {
19 if (p.contains(OverApproxExist.negativeSuffix)) return 1;
20 if (p.contains(Normalisation.auxiliaryConceptPrefix)) return 2;
21 else return 0;
22 }
23
24}
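DLPredicateComparator is what BasicQueryEngine.outputInstanceNumbers uses to order its predicate-count lines: ordinary predicates first, then those containing OverApproxExist.negativeSuffix, then those containing Normalisation.auxiliaryConceptPrefix, alphabetically within each group. A tiny sketch with made-up IRIs:

import java.util.Arrays;

import uk.ac.ox.cs.pagoda.reasoner.light.DLPredicateComparator;

public class PredicateOrderingSketch {
    public static void main(String[] args) {
        String[] lines = {
            "<http://example.org/B> * 2",
            "<http://example.org/A> * 5"
        };
        // Within the same group the comparator falls back to String.compareTo,
        // so A sorts before B here.
        Arrays.sort(lines, new DLPredicateComparator());
        for (String line : lines) System.out.println(line);
    }
}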
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java
new file mode 100644
index 0000000..03d2b67
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java
@@ -0,0 +1,95 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import java.io.FileNotFoundException;
4import java.io.IOException;
5import java.util.LinkedList;
6import java.util.Map.Entry;
7
8import org.semanticweb.HermiT.model.DLClause;
9import org.semanticweb.karma2.exception.IllegalInputQueryException;
10import org.semanticweb.karma2.model.ConjunctiveQuery;
11import org.semanticweb.karma2.model.cqparser.ConjunctiveQueryParser;
12import uk.ac.ox.cs.pagoda.MyPrefixes;
13import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
14import uk.ac.ox.cs.pagoda.hermit.RuleHelper;
15import uk.ac.ox.cs.pagoda.util.Utility;
16
17public class KarmaQuery {
18
19 StringBuffer queryBuffer;
20
21 public KarmaQuery(String queryText) {
22 LinkedList<String> answerVariables = new LinkedList<String>();
23 DLClause clause = DLClauseHelper.getQuery(queryText, answerVariables);
24 String clauseText = RuleHelper.getText(clause);
25// clauseText = RuleHelper.abbreviateIRI(clauseText).replace(":-", "<-");
26 clauseText = clauseText.replace(":-", "<-");
27 queryBuffer = new StringBuffer();
28
29 clauseText = expandIRI4Arguments(clauseText);
30
31 for (Entry<String, String> entry : MyPrefixes.PAGOdAPrefixes.getPrefixIRIsByPrefixName().entrySet())
32 if (clauseText.contains(entry.getKey())) {
33 if (queryBuffer.length() > 0) queryBuffer.append(',').append(Utility.LINE_SEPARATOR);
34 queryBuffer.append("prefix ").append(entry.getKey()).append(" <").append(entry.getValue()).append(">");
35 }
36 if (queryBuffer.length() > 0) queryBuffer.append(Utility.LINE_SEPARATOR);
37
38 queryBuffer.append("p(");
39 boolean first = true;
40 for (String var: answerVariables) {
41 if (first) first = false;
42 else queryBuffer.append(",");
43
44 queryBuffer.append("?").append(var);
45 }
46 queryBuffer.append(")").append(clauseText.substring(0, clauseText.length() - 1));
47 }
48
49 private String expandIRI4Arguments(String clauseText) {
50 int leftIndex = clauseText.indexOf('('), rightIndex = clauseText.indexOf(')', leftIndex + 1);
51 String argsText, newArgsText;
52 while (leftIndex != -1) {
53 argsText = clauseText.substring(leftIndex + 1, rightIndex);
54 newArgsText = MyPrefixes.PAGOdAPrefixes.expandText(argsText);
55 clauseText = clauseText.replace(argsText, newArgsText);
56
57 rightIndex += newArgsText.length() - argsText.length();
58 leftIndex = clauseText.indexOf('(', rightIndex + 1);
59 rightIndex = clauseText.indexOf(')', leftIndex + 1);
60 }
61
62 return clauseText;
63 }
64
65 public ConjunctiveQuery getConjunctiveQuery() {
66 ConjunctiveQuery cq = null;
67 try {
68 cq = new ConjunctiveQueryParser(toString()).parse();
69 } catch (FileNotFoundException e) {
70 // TODO Auto-generated catch block
71 e.printStackTrace();
72 } catch (IllegalInputQueryException e) {
73 // TODO Auto-generated catch block
74 e.printStackTrace();
75 } catch (IOException e) {
76 // TODO Auto-generated catch block
77 e.printStackTrace();
78 } catch (Exception e) {
79 Utility.logDebug("The query cannot be properly handled by KARMA.");
80 return null;
81 }
82 return cq;
83 }
84
85 @Override
86 public String toString() {
87 return queryBuffer.toString();
88 }
89
90 static String sample = "prefix P0: <http://swat.cse.lehigh.edu/onto/univ-bench.owl#>, " +
91 "prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>, " +
92 "prefix owl: <http://www.w3.org/2002/07/owl#>" +
93 "q(?0) <- owl:Thing(?0), P0:Person(?0)";
94
95}
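KarmaQuery rewrites a query into KARMA's conjunctive-query syntax, of which the static sample field above gives the general shape (prefix declarations followed by a head and a "<-" body). A rough sketch, assuming the input is the same SPARQL-style query text that KarmaQueryEngine.evaluate passes in; the IRIs are hypothetical, and getConjunctiveQuery returns null when KARMA cannot handle the query:

import org.semanticweb.karma2.model.ConjunctiveQuery;

import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQuery;

public class KarmaQuerySketch {
    public static void main(String[] args) {
        KarmaQuery kq = new KarmaQuery("select ?x where { ?x "
                + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> "
                + "<http://swat.cse.lehigh.edu/onto/univ-bench.owl#Person> . }");
        System.out.println(kq);                         // prefixes plus "p(?x) <- ..."
        ConjunctiveQuery cq = kq.getConjunctiveQuery(); // null if the parser rejects it
        System.out.println(cq == null ? "not handled by KARMA" : "parsed");
    }
}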
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java
new file mode 100644
index 0000000..f70dde9
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java
@@ -0,0 +1,98 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import java.io.File;
4import java.io.FileNotFoundException;
5import java.util.*;
6
7import org.semanticweb.karma2.*;
8import org.semanticweb.karma2.clausifier.OntologyProcesser;
9import org.semanticweb.karma2.exception.IllegalInputOntologyException;
10import org.semanticweb.karma2.model.ConjunctiveQuery;
11import org.semanticweb.owlapi.model.OWLOntology;
12
13import uk.ac.ox.cs.pagoda.query.*;
14import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper;
15import uk.ac.ox.cs.pagoda.util.Utility;
16import uk.ac.ox.cs.JRDFox.JRDFStoreException;
17import uk.ac.ox.cs.JRDFox.store.DataStore;
18
19public class KarmaQueryEngine extends RDFoxQueryEngine {
20
21 private MyKarma reasoner = null;
22
23 String karmaDataFile = null, karmaRuleFile = null;
24
25 public KarmaQueryEngine(String name) {
26 super(name);
27
28// int Base = 1 << 6;
29// int index = (new Random().nextInt() % Base + Base) % Base;
30// karmaDataFile = "karma_data" + index + ".ttl";
31// karmaRuleFile = "karma_rule" + index + ".dlog";
32 karmaDataFile = Utility.TempDirectory + "karma_data.ttl";
33 karmaRuleFile = Utility.TempDirectory + "karma_rule.dlog";
34
35 reasoner = new MyKarma();
36 }
37
38 public MyKarma getReasoner() {
39 return reasoner;
40 }
41
42 public void processOntology(OWLOntology elhoOntology) {
43 try {
44 OntologyProcesser.transformOntology(elhoOntology, new File(karmaDataFile), new File(karmaRuleFile));
45 } catch (IllegalInputOntologyException e) {
46 e.printStackTrace();
47 }
48 }
49
50 @Override
51 public void dispose() {
52 reasoner.dispose();
53 }
54
55 @Override
56 public AnswerTuples evaluate(String queryText) {
57 return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], null);
58 }
59
60 @Override
61 public AnswerTuples evaluate(String queryText, String[] answerVars) {
62 return evaluate(queryText, answerVars, null);
63 }
64
65 public AnswerTuples evaluate(String queryText, AnswerTuples soundAnswerTuples) {
66 return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], soundAnswerTuples);
67 }
68
69 public AnswerTuples evaluate(String queryText, String[] answerVars, AnswerTuples soundAnswerTuples) {
70 KarmaQuery karmaQuery = new KarmaQuery(queryText.replace("_:", "?"));
71 reasoner.setConcurrence(false);
72 ConjunctiveQuery cq = karmaQuery.getConjunctiveQuery();
73 if (cq == null) return null;
74 Set<AnswerTuple> answers = reasoner.answerCQ(cq, soundAnswerTuples, !queryText.contains("_:"));
75 return new AnswerTuplesImp(answerVars, answers);
76 }
77
78 @Override
79 public DataStore getDataStore() {
80 return reasoner.getStore();
81 }
82
83 public void initialiseKarma() {
84 try {
85 reasoner.initializeData(new File(karmaDataFile));
86 reasoner.materialise(new File(karmaRuleFile));
87
88 File tmp;
89 if (karmaDataFile != null && ((tmp = new File(karmaDataFile)).exists())) tmp.delete();
90 if (karmaRuleFile != null && ((tmp = new File(karmaRuleFile)).exists())) tmp.delete();
91 } catch (FileNotFoundException e) {
92 e.printStackTrace();
93 } catch (JRDFStoreException e) {
94 e.printStackTrace();
95 }
96 }
97
98}
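The KarmaQueryEngine lifecycle as it appears above: processOntology clausifies an ELHO ontology into the temporary karma_data.ttl/karma_rule.dlog files, initialiseKarma loads and materialises them (and then deletes them), after which queries can be evaluated. A sketch; the ontology path and query are hypothetical, and OWLManager is the usual OWL API entry point:

import java.io.File;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;

import uk.ac.ox.cs.pagoda.query.AnswerTuples;
import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine;

public class KarmaEngineSketch {
    public static void main(String[] args) throws Exception {
        OWLOntology elho = OWLManager.createOWLOntologyManager()
                .loadOntologyFromOntologyDocument(new File("/tmp/ontology.owl"));
        KarmaQueryEngine karma = new KarmaQueryEngine("karma");
        try {
            karma.processOntology(elho);
            karma.initialiseKarma();
            AnswerTuples answers = karma.evaluate("select ?x where { ?x "
                    + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> "
                    + "<http://example.org/Person> . }");
            // Iterate with isValid()/moveNext() as in the BasicQueryEngine sketch.
            if (answers != null) answers.dispose();
        } finally {
            karma.dispose();
        }
    }
}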
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java
new file mode 100644
index 0000000..dd71809
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java
@@ -0,0 +1,100 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import org.semanticweb.HermiT.model.Constant;
4import org.semanticweb.HermiT.model.Individual;
5import org.semanticweb.HermiT.model.Term;
6
7import uk.ac.ox.cs.pagoda.query.AnswerTuple;
8import uk.ac.ox.cs.pagoda.query.AnswerTuples;
9import uk.ac.ox.cs.pagoda.util.Utility;
10import uk.ac.ox.cs.JRDFox.JRDFStoreException;
11import uk.ac.ox.cs.JRDFox.model.GroundTerm;
12import uk.ac.ox.cs.JRDFox.store.TupleIterator;
13
14public class RDFoxAnswerTuples implements AnswerTuples {
15
16 long multi;
17 TupleIterator m_iter;
18 String[] m_answerVars;
19
20 public RDFoxAnswerTuples(String[] answerVars, TupleIterator iter) {
21 m_answerVars = answerVars;
22 m_iter = iter;
23 reset();
24 }
25
26 @Override
27 public boolean isValid() {
28 return multi != 0;
29 }
30
31 @Override
32 public int getArity() {
33 try {
34 return m_iter.getArity();
35 } catch (JRDFStoreException e) {
36 e.printStackTrace();
37 return -1;
38 }
39 }
40
41 @Override
42 public void moveNext() {
43 try {
44 multi = m_iter.getNext();
45 } catch (JRDFStoreException e) {
46 e.printStackTrace();
47 }
48 }
49
50 @Override
51 public void dispose() {
52 m_iter.dispose();
53 }
54
55 protected void finalize() {
56 m_iter.dispose();
57 }
58
59 @Override
60 public AnswerTuple getTuple() {
61 return new AnswerTuple(m_iter, m_answerVars.length);
62 }
63
64 @Override
65 public void reset() {
66 try {
67 multi = m_iter.open();
68 } catch (JRDFStoreException e) {
69 e.printStackTrace();
70 }
71 }
72
73 @Override
74 public boolean contains(AnswerTuple t) {
75 Utility.logError("Unsupported operation in RDFoxAnswerTuples");
76 return false;
77 }
78
79 @Override
80 public void remove() {
81 Utility.logError("Unsupported operation in RDFoxAnswerTuples");
82 }
83
84 @Override
85 public String[] getAnswerVariables() {
86 return m_answerVars;
87 }
88
89 public static Term getHermitTerm(GroundTerm t) {
90 if (t instanceof uk.ac.ox.cs.JRDFox.model.Individual) {
91 uk.ac.ox.cs.JRDFox.model.Individual individual = (uk.ac.ox.cs.JRDFox.model.Individual) t;
92 return Individual.create(individual.getIRI());
93 }
94 else {
95 uk.ac.ox.cs.JRDFox.model.Literal literal = ((uk.ac.ox.cs.JRDFox.model.Literal) t);
96 return Constant.create(literal.getLexicalForm(), literal.getDatatype().getIRI());
97 }
98 }
99
100}
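getHermitTerm is the bridge back from JRDFox ground terms to the HermiT model: IRIs become org.semanticweb.HermiT.model.Individual, literals become Constant with their lexical form and datatype. A small sketch with a made-up IRI:

import org.semanticweb.HermiT.model.Term;

import uk.ac.ox.cs.JRDFox.model.GroundTerm;
import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxAnswerTuples;

public class TermConversionSketch {
    public static void main(String[] args) {
        GroundTerm g = uk.ac.ox.cs.JRDFox.model.Individual.create("http://example.org/a");
        Term h = RDFoxAnswerTuples.getHermitTerm(g); // a HermiT Individual for the same IRI
        System.out.println(h);
    }
}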
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
new file mode 100644
index 0000000..30771ab
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
@@ -0,0 +1,110 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import java.io.File;
4import java.util.Collection;
5
6import uk.ac.ox.cs.pagoda.MyPrefixes;
7import uk.ac.ox.cs.pagoda.query.AnswerTuples;
8import uk.ac.ox.cs.pagoda.reasoner.QueryEngine;
9import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner;
10import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter;
11import uk.ac.ox.cs.pagoda.util.Timer;
12import uk.ac.ox.cs.pagoda.util.Utility;
13import uk.ac.ox.cs.JRDFox.JRDFStoreException;
14import uk.ac.ox.cs.JRDFox.Prefixes;
15import uk.ac.ox.cs.JRDFox.store.DataStore;
16import uk.ac.ox.cs.JRDFox.store.DataStore.StoreType;
17
18public abstract class RDFoxQueryEngine implements QueryEngine {
19
20 public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2;
21
22 protected String name;
23 protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes();
24
25 public RDFoxQueryEngine(String name) {
26 this.name = name;
27 }
28
29 public abstract DataStore getDataStore();
30
31 public abstract void dispose();
32
33 public void importRDFData(String fileName, String importedFile) {
34 if (importedFile == null || importedFile.isEmpty()) return ;
35 Timer t = new Timer();
36 DataStore store = getDataStore();
37 try {
38 long oldTripleCount = store.getTriplesCount(), tripleCount;
39 for (String file: importedFile.split(QueryReasoner.ImportDataFileSeparator))
40 store.importTurtleFile(new File(file), prefixes);
41 tripleCount = store.getTriplesCount();
42 Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)");
43 store.clearRulesAndMakeFactsExplicit();
44 } catch (JRDFStoreException e) {
45 e.printStackTrace();
46 }
47 Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds.");
48 }
49
50 public void materialise(String programName, String programText) {
51 if (programText == null) return ;
52 Timer t = new Timer();
53 DataStore store = getDataStore();
54 try {
55 long oldTripleCount = store.getTriplesCount(), tripleCount;
56// store.addRules(new String[] {programText});
57 store.importRules(programText);
58 store.applyReasoning();
59 tripleCount = store.getTriplesCount();
60 Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)");
61 store.clearRulesAndMakeFactsExplicit();
62 } catch (JRDFStoreException e) {
63 e.printStackTrace();
64 }
65 Utility.logDebug(name + " store finished the materialisation of " + programName + " in " + t.duration() + " seconds.");
66 }
67
68 @Override
69 public void evaluate(Collection<String> queryTexts, String answerFile) {
70 if (queryTexts == null)
71 return ;
72
73 int queryID = 0;
74 AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile);
75 AnswerTuples answerTuples;
76 Timer t = new Timer();
77 try {
78 for (String query: queryTexts) {
79 t.reset();
80 answerTuples = null;
81 try {
82 answerTuples = evaluate(query);
83 Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration());
84 answerWriter.write(answerTuples.getAnswerVariables(), answerTuples);
85 } finally {
86 if (answerTuples != null) answerTuples.dispose();
87 }
88 }
89 } finally {
90 answerWriter.close();
91 }
92
93 Utility.logDebug("done computing query answers by RDFox.");
94
95 }
96
97 public static DataStore createDataStore() {
98 DataStore instance = null;
99 try {
100// instance = new DataStore("par-head-n");
101 instance = new DataStore(StoreType.NarrowParallelHead);
102 instance.setNumberOfThreads(matNoOfThreads);
103 instance.initialize();
104 } catch (JRDFStoreException e) {
105 e.printStackTrace();
106 }
107 return instance;
108 }
109
110}
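RDFoxQueryEngine centralises store creation (a NarrowParallelHead store with twice as many threads as available processors) and the import/materialise/clear cycle; subclasses only supply getDataStore, dispose and the evaluate overloads. A batch-evaluation sketch using the BasicQueryEngine from this commit; the paths and query are hypothetical:

import java.util.Arrays;
import java.util.List;

import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine;

public class BatchEvaluationSketch {
    public static void main(String[] args) {
        RDFoxQueryEngine engine = new BasicQueryEngine("batch");
        engine.importRDFData("abox", "/tmp/data.ttl");
        List<String> queries = Arrays.asList(
                "select ?x ?y where { ?x <http://example.org/worksFor> ?y . }");
        // Each query's answers are written to the file via AnswerTuplesWriter.
        engine.evaluate(queries, "/tmp/answers.txt");
        engine.dispose();
    }
}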
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
new file mode 100644
index 0000000..2280b12
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
@@ -0,0 +1,249 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import java.util.Collection;
4import java.util.HashMap;
5import java.util.HashSet;
6import java.util.LinkedList;
7import java.util.Map;
8import java.util.Queue;
9import java.util.Set;
10
11import org.semanticweb.HermiT.model.AnnotatedEquality;
12import org.semanticweb.HermiT.model.Atom;
13import org.semanticweb.HermiT.model.AtomicConcept;
14import org.semanticweb.HermiT.model.AtomicRole;
15import org.semanticweb.HermiT.model.Constant;
16import org.semanticweb.HermiT.model.DLPredicate;
17import org.semanticweb.HermiT.model.Equality;
18import org.semanticweb.HermiT.model.Individual;
19import org.semanticweb.HermiT.model.Inequality;
20import org.semanticweb.HermiT.model.Term;
21import org.semanticweb.HermiT.model.Variable;
22
23import uk.ac.ox.cs.pagoda.owl.OWLHelper;
24import uk.ac.ox.cs.pagoda.util.Namespace;
25import uk.ac.ox.cs.JRDFox.JRDFStoreException;
26import uk.ac.ox.cs.JRDFox.model.GroundTerm;
27import uk.ac.ox.cs.JRDFox.store.DataStore;
28import uk.ac.ox.cs.JRDFox.model.Datatype;
29import uk.ac.ox.cs.JRDFox.store.Dictionary;
30import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
31import uk.ac.ox.cs.JRDFox.store.Resource;
32
33public class RDFoxTripleManager {
34
35 UpdateType m_incrementally;
36// boolean m_incrementally;
37
38 DataStore m_store;
39 Dictionary m_dict;
40 Set<Atom> triplesByTerm = new HashSet<Atom>();
41
42 public RDFoxTripleManager(DataStore store, boolean incrementally) {
43 m_store = store;
44// m_incrementally = incrementally;
45 if (incrementally)
46 m_incrementally = UpdateType.ScheduleForAddition;
47 else
48 m_incrementally = UpdateType.Add;
49
50 try {
51 m_dict = store.getDictionary();
52 resourceID = m_dict.resolveResources(
53 new String[] {Namespace.RDF_TYPE, Namespace.EQUALITY, Namespace.INEQUALITY},
54 new int[] {Datatype.IRI_REFERENCE.value(), Datatype.IRI_REFERENCE.value(), Datatype.IRI_REFERENCE.value()}
55 );
56 } catch (JRDFStoreException e) {
57 e.printStackTrace();
58 }
59 }
60
61 public boolean isRdfTypeID(int id) {
62 return id == resourceID[0];
63 }
64
65 public void addTripleByID(int[] tuple) {
66 try {
67 m_store.addTriplesByResourceIDs(tuple, m_incrementally);
68 } catch (JRDFStoreException e) {
69 e.printStackTrace();
70 }
71 }
72
73 public void addTripleByTerm(Atom atom) {
74 try {
75 m_store.addTriples(getRDFoxTriple(atom), m_incrementally);
76 } catch (JRDFStoreException e) {
77 e.printStackTrace();
78 }
79 }
80
81 public static GroundTerm[] getRDFoxTriple(Atom instance) {
82 if (instance.getArity() == 1)
83 return new GroundTerm[] {
84 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
85 uk.ac.ox.cs.JRDFox.model.Individual.RDF_TYPE,
86 uk.ac.ox.cs.JRDFox.model.Individual.create(((AtomicConcept) instance.getDLPredicate()).getIRI()) };
87 else if (instance.getDLPredicate() instanceof Equality || instance.getDLPredicate() instanceof AnnotatedEquality)
88 return new GroundTerm[] {
89 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
90 uk.ac.ox.cs.JRDFox.model.Individual.SAME_AS,
91 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) };
92 else if (instance.getDLPredicate() instanceof Inequality)
93 return new GroundTerm[] {
94 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
95 uk.ac.ox.cs.JRDFox.model.Individual.DIFFERENT_FROM,
96 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) };
97 else
98 return new GroundTerm[] {
99 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
100 uk.ac.ox.cs.JRDFox.model.Individual.create(((AtomicRole) instance.getDLPredicate()).getIRI()),
101 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) };
102 }
103
104 int[] resourceID; // rdf:type, owl:sameAs, owl:differentFrom
105
106 public int[] getInstance(Atom atom, Map<Variable, Integer> assignment) {
107 DLPredicate p = atom.getDLPredicate();
108 if (p instanceof Equality || p instanceof AnnotatedEquality)
109 return new int[] {
110 getResourceID(atom.getArgument(0), assignment),
111 resourceID[1],
112 getResourceID(atom.getArgument(1), assignment)
113 };
114 else if (p instanceof Inequality)
115 return new int[] {
116 getResourceID(atom.getArgument(0), assignment),
117 resourceID[2],
118 getResourceID(atom.getArgument(1), assignment)
119 };
120 else if (atom.getArity() == 1)
121 return new int[] {
122 getResourceID(atom.getArgument(0), assignment),
123 resourceID[0],
124 getResourceID(p)
125 };
126 else
127 return new int[] {
128 getResourceID(atom.getArgument(0), assignment),
129 getResourceID(p),
130 getResourceID(atom.getArgument(1), assignment)
131 };
132 }
133
134 public String getRawTerm(int id) {
135 Resource[] res = new Resource[1];
136 try {
137 m_dict.getResources(new int[] {id}, 0, 1, res);
138 } catch (JRDFStoreException e) {
139 e.printStackTrace();
140 }
141 return getQuotedTerm(res[0]);
142 }
143
144 Map<String, Integer> predicateCache = new HashMap<String, Integer>();
145
146 public int getResourceID(DLPredicate p) {
147 Integer id;
148 String name = p instanceof AtomicConcept ? ((AtomicConcept) p).getIRI() : ((AtomicRole) p).getIRI();
149 if ((id = predicateCache.get(name)) != null) return id;
150 try {
151 predicateCache.put(name, id = resolveResource(name, Datatype.IRI_REFERENCE.value()));
152
153 } catch (JRDFStoreException e) {
154 e.printStackTrace();
155 }
156 return id;
157 }
158
159 public int getResourceID(String name) {
160 Integer id = null;
161 try {
162 id = resolveResource(name, Datatype.IRI_REFERENCE.value());
163 } catch (JRDFStoreException e) {
164 e.printStackTrace();
165 }
166 return id;
167 }
168
169 private int resolveResource(String name, int type) throws JRDFStoreException {
170 String[] lexicalForms = new String[] {name};
171 int[] types = new int[] {type};
172 return m_dict.resolveResources(lexicalForms, types)[0];
173 }
174
175 Map<Term, Integer> termCache = new HashMap<Term, Integer>();
176 Queue<Term> termList = new LinkedList<Term>();
177 int sizeLimit = 10000;
178
179 private int getResourceID(Term arg, Map<Variable, Integer> assignment) {
180 while (termCache.size() > sizeLimit)
181 termCache.remove(termList.poll());
182
183 if (arg instanceof Variable) return assignment.get((Variable) arg);
184 Integer id = null;
185 if ((id = termCache.get(arg)) != null)
186 return id;
187
188// if (arg instanceof Individual) {
189 try {
190 if (arg instanceof Individual)
191 termCache.put(arg, id = resolveResource(((Individual) arg).getIRI(), Datatype.IRI_REFERENCE.value()));
192 else if (arg instanceof Constant)
193 termCache.put(arg, id = resolveResource(((Constant) arg).getLexicalForm(), getDatatypeID(((Constant) arg).getDatatypeURI())));
194
195 } catch (JRDFStoreException e) {
196 e.printStackTrace();
197 }
198// }
199
200 return id;
201 }
202
203 private static int getDatatypeID(String uri) {
204 if (uri.equals("http://www.w3.org/2001/XMLSchema#string")) return Datatype.XSD_STRING.value();
205 if (uri.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#PlainLiteral")) return Datatype.RDF_PLAIN_LITERAL.value();
206 if (uri.equals("http://www.w3.org/2001/XMLSchema#integer")) return Datatype.XSD_INTEGER.value();
207 if (uri.equals("http://www.w3.org/2001/XMLSchema#float")) return Datatype.XSD_FLOAT.value();
208 if (uri.equals("http://www.w3.org/2001/XMLSchema#double")) return Datatype.XSD_DOUBLE.value();
209 if (uri.equals("http://www.w3.org/2001/XMLSchema#boolean")) return Datatype.XSD_BOOLEAN.value();
210 if (uri.equals("http://www.w3.org/2001/XMLSchema#dateTime")) return Datatype.XSD_DATE_TIME.value();
211 if (uri.equals("http://www.w3.org/2001/XMLSchema#time")) return Datatype.XSD_TIME.value();
212 if (uri.equals("http://www.w3.org/2001/XMLSchema#date")) return Datatype.XSD_DATE.value();
213 if (uri.equals("http://www.w3.org/2001/XMLSchema#gYearMonth")) return Datatype.XSD_G_YEAR_MONTH.value();
214 if (uri.equals("http://www.w3.org/2001/XMLSchema#gYear")) return Datatype.XSD_G_YEAR.value();
215 if (uri.equals("http://www.w3.org/2001/XMLSchema#gMonthDay")) return Datatype.XSD_G_MONTH_DAY.value();
216 if (uri.equals("http://www.w3.org/2001/XMLSchema#gDay")) return Datatype.XSD_G_DAY.value();
217 if (uri.equals("http://www.w3.org/2001/XMLSchema#gMonth")) return Datatype.XSD_G_MONTH.value();
218 if (uri.equals("http://www.w3.org/2001/XMLSchema#duration")) return Datatype.XSD_DURATION.value();
219
220 return -1;
221 }
222
223 public int[] getResourceIDs(Collection<uk.ac.ox.cs.JRDFox.model.Individual> individuals) {
224 String[] str = new String[individuals.size()];
225 int[] types = new int[individuals.size()];
226 int index = 0;
227 for (uk.ac.ox.cs.JRDFox.model.Individual individual : individuals) {
228 types[index] = Datatype.IRI_REFERENCE.value();
229 str[index++] = individual.getIRI();
230 }
231
232 try {
233 return m_dict.resolveResources(str, types);
234 } catch (JRDFStoreException e) {
235 e.printStackTrace();
236 return null;
237 }
238 }
239
240 public static String getQuotedTerm(Resource r) {
241 if (r.m_datatype.equals(Datatype.IRI_REFERENCE))
242 return OWLHelper.addAngles(r.m_lexicalForm);
243 if (r.m_datatype.equals(Datatype.XSD_STRING) || r.m_datatype.equals(Datatype.RDF_PLAIN_LITERAL))
244 return "\"" + r.m_lexicalForm + "\"";
245 else
246 return "\"" + r.m_lexicalForm + "\"^^<" + r.m_datatype.getIRI() + ">";
247 }
248
249}
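RDFoxTripleManager turns HermiT atoms into RDFox triples, either by term or by resource ID, caching dictionary look-ups along the way. A sketch that adds a single class assertion to a fresh store; the IRIs are made up, and Atom.create/AtomicConcept.create/Individual.create are the standard HermiT model factories:

import org.semanticweb.HermiT.model.Atom;
import org.semanticweb.HermiT.model.AtomicConcept;
import org.semanticweb.HermiT.model.Individual;

import uk.ac.ox.cs.JRDFox.store.DataStore;
import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine;
import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager;

public class TripleManagerSketch {
    public static void main(String[] args) {
        DataStore store = RDFoxQueryEngine.createDataStore();
        // false -> UpdateType.Add: the triple is added immediately rather than
        // scheduled for incremental addition.
        RDFoxTripleManager manager = new RDFoxTripleManager(store, false);
        Atom fact = Atom.create(AtomicConcept.create("http://example.org/Person"),
                Individual.create("http://example.org/alice"));
        manager.addTripleByTerm(fact); // stored as  alice rdf:type Person
        store.dispose();
    }
}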