Diffstat (limited to 'src/uk/ac/ox/cs/pagoda/reasoner/light')
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java      | 422
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java | 24
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java            | 95
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java      | 109
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java     | 117
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java      | 138
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java    | 269
7 files changed, 0 insertions, 1174 deletions
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java
deleted file mode 100644
index 034827e..0000000
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java
+++ /dev/null
@@ -1,422 +0,0 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import org.semanticweb.HermiT.model.DLClause;
4import uk.ac.ox.cs.JRDFox.JRDFStoreException;
5import uk.ac.ox.cs.JRDFox.store.DataStore;
6import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
7import uk.ac.ox.cs.JRDFox.store.Parameters;
8import uk.ac.ox.cs.JRDFox.store.TripleStatus;
9import uk.ac.ox.cs.JRDFox.store.TupleIterator;
10import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
11import uk.ac.ox.cs.pagoda.query.AnswerTuples;
12import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
13import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
14import uk.ac.ox.cs.pagoda.rules.Program;
15import uk.ac.ox.cs.pagoda.util.*;
16import uk.ac.ox.cs.pagoda.util.Timer;
17import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
18
19import java.util.*;
20
21public class BasicQueryEngine extends RDFoxQueryEngine {
22
23 protected DataStore store;
24 protected Parameters parameters = new Parameters();
25 Set<DLClause> materialisedRules = new HashSet<DLClause>();
26 private UFS<String> equalityGroups = null;
27
28 public BasicQueryEngine(String name) {
29 super(name);
30 store = RDFoxQueryEngine.createDataStore();
31 parameters.m_allAnswersInRoot = true;
32 parameters.m_useBushy = true;
33 }
34
35 /***
36 * @return Overall number of triples.
37 */
38 public long getStoreSize() throws JRDFStoreException {
39 return store.getTriplesCount();
40 }
41
42 public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) {
43 if(isDisposed()) throw new DisposedException();
44 if(gap != null) {
45 materialise("lower program", dProgram.getLower().toString());
46 String program = dProgram.getUpper().toString();
47 try {
48 gap.compile(program);
49 gap.addBackTo();
50 getDataStore().clearRulesAndMakeFactsExplicit();
51 } catch(JRDFStoreException e) {
52 e.printStackTrace();
53 } finally {
54 gap.clear();
55 }
56 }
57 else
58 materialise("upper program", dProgram.getUpper().toString());
59 }
60
61 public int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap) {
62 if(isDisposed()) throw new DisposedException();
63 if(gap != null) {
64 materialise("lower program", dProgram.getLower().toString());
65 String program = dProgram.getUpper().toString();
66 try {
67 gap.compile(program);
68 gap.addBackTo();
69 getDataStore().clearRulesAndMakeFactsExplicit();
70 } catch(JRDFStoreException e) {
71 e.printStackTrace();
72 } finally {
73 gap.clear();
74 }
75 }
76 else
77 materialise("upper program", dProgram.getUpper().toString());
78
79 return 1;
80 }
81
82 @Override
83 public AnswerTuples evaluate(String queryText) {
84 if(isDisposed()) throw new DisposedException();
85 return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0]);
86 }
87
88 @Override
89 public AnswerTuples evaluate(String queryText, String[] answerVars) {
90 if(isDisposed()) throw new DisposedException();
91 TupleIterator tupleIterator;
92 try {
93 tupleIterator = store.compileQuery(queryText.replace("_:", "?"), prefixes, parameters);
94 } catch(JRDFStoreException e) {
95 e.printStackTrace();
96 return null;
97 }
98 return new RDFoxAnswerTuples(answerVars, tupleIterator);
99 }
100
101 @Override
102 public DataStore getDataStore() {
103 if(isDisposed()) throw new DisposedException();
104 return store;
105 }
106
107 @Override
108 public void dispose() {
109 super.dispose();
110 store.dispose();
111 }
112
113 public void outputInstance4BinaryPredicate(String iri, String filename) {
114 if(isDisposed()) throw new DisposedException();
115
116 Utility.redirectCurrentOut(filename);
117 outputInstance4BinaryPredicate(iri);
118 Utility.closeCurrentOut();
119 }
120
121 public void outputInstance4BinaryPredicate(String iri) {
122 if(isDisposed()) throw new DisposedException();
123
124 outputAnswers("select ?x ?y where { ?x <" + iri + "> ?y . }");
125 }
126
127 public void outputInstanceNumbers(String filename) {
128 if(isDisposed()) throw new DisposedException();
129
130 TupleIterator predicateTuples = null;
131 TupleIterator instanceTuples;
132 Set<String> number = new HashSet<String>();
133 String predicate;
134 try {
135 predicateTuples =
136 getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Y }", prefixes, parameters);
137 for(long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) {
138 predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0));
139 instanceTuples = null;
140 try {
141 instanceTuples =
142 getDataStore().compileQuery("SELECT ?X WHERE { ?X <" + Namespace.RDF_TYPE + "> " + predicate + " }", prefixes, parameters);
143 long totalCount = 0;
144 for(long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) {
145 totalCount += instanceTuples.getMultiplicity();
146 }
147 number.add(predicate + " * " + totalCount);
148 } finally {
149 if(instanceTuples != null) instanceTuples.dispose();
150 }
151 }
152 } catch(JRDFStoreException e) {
153 e.printStackTrace();
154 } finally {
155 if(predicateTuples != null) predicateTuples.dispose();
156 predicateTuples = null;
157 }
158
159 try {
160 predicateTuples =
161 getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X ?Y ?Z }", prefixes, parameters);
162 for(long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) {
163 predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0));
164 instanceTuples = null;
165 try {
166 instanceTuples =
167 getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X " + predicate + " ?Z }", prefixes, parameters);
168 long totalCount = 0;
169 for(long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext())
170 totalCount += instanceTuples.getMultiplicity();
171 number.add(predicate + " * " + totalCount);
172 } finally {
173 if(instanceTuples != null) instanceTuples.dispose();
174 }
175 }
176
177 } catch(JRDFStoreException e) {
178 e.printStackTrace();
179 } finally {
180 if(predicateTuples != null) predicateTuples.dispose();
181 predicateTuples = null;
182 }
183
184 Utility.redirectCurrentOut(filename);
185 String[] ordered = number.toArray(new String[0]);
186 Arrays.sort(ordered, new DLPredicateComparator());
187 for(String line : ordered) System.out.println(line);
188 Utility.closeCurrentOut();
189
190 }
191
192 public TupleIterator internal_evaluateAgainstIDBs(String queryText) throws JRDFStoreException {
193 if(isDisposed()) throw new DisposedException();
194
195 TupleIterator iter =
196 store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB);
197// iter.open();
198 return iter;
199 }
200
201 public TupleIterator internal_evaluate(String queryText) throws JRDFStoreException {
202 if(isDisposed()) throw new DisposedException();
203
204 TupleIterator iter = store.compileQuery(queryText, prefixes, parameters);
205// iter.open();
206 return iter;
207 }
208
209 public void setExpandEquality(boolean flag) {
210 if(isDisposed()) throw new DisposedException();
211
212 parameters.m_expandEquality = flag;
213 }
214
215 public TupleIterator internal_evaluateNotExpanded(String queryText) throws JRDFStoreException {
216 if(isDisposed()) throw new DisposedException();
217
218 parameters.m_expandEquality = false;
219 TupleIterator iter = store.compileQuery(queryText, prefixes, parameters);
220// iter.open();
221 parameters.m_expandEquality = true;
222 return iter;
223 }
224
225 public TupleIterator internal_evaluate(String queryText, boolean incrementally) throws JRDFStoreException {
226 if(isDisposed()) throw new DisposedException();
227
228 return incrementally ? internal_evaluateAgainstIDBs(queryText) : internal_evaluate(queryText);
229 }
230
231 public String getUnusedRules(Collection<DLClause> clauses, boolean toUpdate) {
232 if(isDisposed()) throw new DisposedException();
233
234 DLClause clause;
235 for(Iterator<DLClause> iter = clauses.iterator(); iter.hasNext(); ) {
236 if(materialisedRules.contains(clause = iter.next()))
237 iter.remove();
238 else if(toUpdate) materialisedRules.add(clause);
239 }
240
241 if(clauses.isEmpty()) return null;
242
243 return Program.toString(clauses);
244 }
245
246 public void outputMaterialisedRules() {
247 if(isDisposed()) throw new DisposedException();
248
249 System.out.println(DLClauseHelper.toString(materialisedRules));
250 }
251
252 public void outputAnswers(String query) {
253 if(isDisposed()) throw new DisposedException();
254
255 TupleIterator iter = null;
256 try {
257 iter = internal_evaluate(query);
258 System.out.println(query);
259 int arity = iter.getArity();
260 for(long multi = iter.open(); multi != 0; multi = iter.getNext()) {
261 for(int i = 0; i < arity; ++i)
262 System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t");
263 System.out.println();
264 }
265 } catch(JRDFStoreException e) {
266 e.printStackTrace();
267 } finally {
268 if(iter != null) iter.dispose();
269 }
270 }
271
272 public void outputInstance4UnaryPredicate(String iri) {
273 if(isDisposed()) throw new DisposedException();
274
275 outputAnswers("select ?x where { ?x "
276 + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <"
277 + iri
278 + "> .}");
279 }
280
281 public void outputSubjects(String p, String o) {
282 if(isDisposed()) throw new DisposedException();
283
284        outputAnswers("select ?x where { ?x <" + p + "> <" + o + "> . }");
285 }
286
287 public void outputObjects(String s, String p) {
288 if(isDisposed()) throw new DisposedException();
289
290 outputAnswers("select ?x where { <" + s + "> <" + p + "> ?x . }");
291 }
292
293 public void outputIDBFacts() {
294 if(isDisposed()) throw new DisposedException();
295
296 TupleIterator iter = null;
297 try {
298            iter = internal_evaluateAgainstIDBs("select distinct ?x ?y ?z where { ?x ?y ?z }");
299 for(long multi = iter.open(); multi != 0; multi = iter.getNext()) {
300 for(int i = 0; i < 3; ++i)
301 System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t");
302 System.out.println();
303 }
304 } catch(JRDFStoreException e) {
305 // TODO Auto-generated catch block
306 e.printStackTrace();
307 } finally {
308 if(iter != null) iter.dispose();
309 }
310
311 }
312
313 public void outputType4Individual(String iri) {
314 if(isDisposed()) throw new DisposedException();
315
316 outputAnswers("select ?z where { <" + iri + "> " + Namespace.RDF_TYPE_QUOTED + " ?z }");
317 }
318
319 public int getSameAsNumber() {
320 if(isDisposed()) throw new DisposedException();
321
322 TupleIterator iter = null;
323 int counter = 0;
324 try {
325 iter = internal_evaluate("select ?x ?y where {?x " + Namespace.EQUALITY_QUOTED + " ?y . }");
326 for(long multi = iter.open(); multi != 0; multi = iter.getNext())
327 if(iter.getResourceID(0) != iter.getResourceID(1))
328 ++counter;
329 } catch(JRDFStoreException e) {
330 e.printStackTrace();
331 } finally {
332 if(iter != null) iter.dispose();
333 }
334 return counter;
335 }
336
337 public UFS<String> getEqualityGroups(boolean reuse) {
338 if(isDisposed()) throw new DisposedException();
339
340 if(reuse && equalityGroups != null) return equalityGroups;
341
342 equalityGroups = new UFS<String>();
343
344 TupleIterator answers = null;
345 try {
346 Timer t = new Timer();
347            answers = internal_evaluate("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + " ?z . }");
348 for(long multi = answers.open(); multi != 0; multi = answers.getNext()) {
349 if(answers.getResourceID(0) != answers.getResourceID(1))
350 equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm);
351 }
352 Utility.logInfo("@Time to group individuals by equality: " + t.duration());
353 } catch(JRDFStoreException e) {
354 e.printStackTrace();
355 } finally {
356 if(answers != null) answers.dispose();
357 }
358
359 return equalityGroups;
360 }
361
362 public void clearRulesAndIDBFacts(Collection<int[]> collection) {
363 if(isDisposed()) throw new DisposedException();
364
365// performDeletion(collection);
366 collection.clear();
367 try {
368 store.clearRulesAndMakeFactsExplicit();
369 } catch(JRDFStoreException e) {
370 e.printStackTrace();
371 }
372 }
373
374 protected void outputClassAssertions(String filename) {
375 TupleIterator allTuples = null;
376 boolean redirect = false;
377 try {
378 allTuples =
379 getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Z }", prefixes, parameters);
380 redirect = Utility.redirectCurrentOut(filename);
381 for(long multi = allTuples.open(); multi != 0; multi = allTuples.getNext())
382 System.out.println(RDFoxTripleManager.getQuotedTerm(allTuples.getResource(0)) + " " + RDFoxTripleManager
383 .getQuotedTerm(allTuples.getResource(1)));
384 } catch(JRDFStoreException e) {
385 e.printStackTrace();
386 } finally {
387 if(redirect) Utility.closeCurrentOut();
388 if(allTuples != null) allTuples.dispose();
389 }
390 }
391
392 @SuppressWarnings("unused")
393 private void performDeletion(Collection<int[]> collection) {
394        Utility.logInfo("Remove all rules, IDB facts and added stuff...");
395 Timer timer = new Timer();
396 TupleIterator iter = null;
397 try {
398 UpdateType ut = UpdateType.ScheduleForDeletion;
399 for(int[] t : collection)
400 store.addTriplesByResourceIDs(t, ut);
401
402 try {
403 iter = internal_evaluateAgainstIDBs("select ?x ?y ?z where { ?x ?y ?z . }");
404 for(long multi = iter.open(); multi != 0; multi = iter.getNext()) {
405 int[] triple = new int[3];
406 for(int i = 0; i < 3; ++i)
407 triple[i] = iter.getResourceID(i);
408 store.addTriplesByResourceIDs(triple, ut);
409 }
410 } finally {
411 if(iter != null) iter.dispose();
412 iter = null;
413 }
414 store.applyReasoning(true);
415 } catch(JRDFStoreException e) {
416 e.printStackTrace();
417 }
418 Utility.logInfo("Time for deletion: " + timer.duration());
419 }
420
421
422}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java
deleted file mode 100644
index 05e399e..0000000
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java
+++ /dev/null
@@ -1,24 +0,0 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import uk.ac.ox.cs.pagoda.multistage.Normalisation;
4import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
5
6import java.util.Comparator;
7
8public class DLPredicateComparator implements Comparator<String> {
9
10 @Override
11 public int compare(String arg0, String arg1) {
12 int ret = type(arg0) - type(arg1);
13 if (ret != 0) return ret;
14
15 return arg0.compareTo(arg1);
16 }
17
18 private int type(String p) {
19 if (p.contains(OverApproxExist.negativeSuffix)) return 1;
20 if (p.contains(Normalisation.auxiliaryConceptPrefix)) return 2;
21 else return 0;
22 }
23
24}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java
deleted file mode 100644
index 03d2b67..0000000
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java
+++ /dev/null
@@ -1,95 +0,0 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import java.io.FileNotFoundException;
4import java.io.IOException;
5import java.util.LinkedList;
6import java.util.Map.Entry;
7
8import org.semanticweb.HermiT.model.DLClause;
9import org.semanticweb.karma2.exception.IllegalInputQueryException;
10import org.semanticweb.karma2.model.ConjunctiveQuery;
11import org.semanticweb.karma2.model.cqparser.ConjunctiveQueryParser;
12import uk.ac.ox.cs.pagoda.MyPrefixes;
13import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
14import uk.ac.ox.cs.pagoda.hermit.RuleHelper;
15import uk.ac.ox.cs.pagoda.util.Utility;
16
17public class KarmaQuery {
18
19 StringBuffer queryBuffer;
20
21 public KarmaQuery(String queryText) {
22 LinkedList<String> answerVariables = new LinkedList<String>();
23 DLClause clause = DLClauseHelper.getQuery(queryText, answerVariables);
24 String clauseText = RuleHelper.getText(clause);
25// clauseText = RuleHelper.abbreviateIRI(clauseText).replace(":-", "<-");
26 clauseText = clauseText.replace(":-", "<-");
27 queryBuffer = new StringBuffer();
28
29 clauseText = expandIRI4Arguments(clauseText);
30
31 for (Entry<String, String> entry : MyPrefixes.PAGOdAPrefixes.getPrefixIRIsByPrefixName().entrySet())
32 if (clauseText.contains(entry.getKey())) {
33 if (queryBuffer.length() > 0) queryBuffer.append(',').append(Utility.LINE_SEPARATOR);
34 queryBuffer.append("prefix ").append(entry.getKey()).append(" <").append(entry.getValue()).append(">");
35 }
36 if (queryBuffer.length() > 0) queryBuffer.append(Utility.LINE_SEPARATOR);
37
38 queryBuffer.append("p(");
39 boolean first = true;
40 for (String var: answerVariables) {
41 if (first) first = false;
42 else queryBuffer.append(",");
43
44 queryBuffer.append("?").append(var);
45 }
46 queryBuffer.append(")").append(clauseText.substring(0, clauseText.length() - 1));
47 }
48
49 private String expandIRI4Arguments(String clauseText) {
50 int leftIndex = clauseText.indexOf('('), rightIndex = clauseText.indexOf(')', leftIndex + 1);
51 String argsText, newArgsText;
52 while (leftIndex != -1) {
53 argsText = clauseText.substring(leftIndex + 1, rightIndex);
54 newArgsText = MyPrefixes.PAGOdAPrefixes.expandText(argsText);
55 clauseText = clauseText.replace(argsText, newArgsText);
56
57 rightIndex += newArgsText.length() - argsText.length();
58 leftIndex = clauseText.indexOf('(', rightIndex + 1);
59 rightIndex = clauseText.indexOf(')', leftIndex + 1);
60 }
61
62 return clauseText;
63 }
64
65 public ConjunctiveQuery getConjunctiveQuery() {
66 ConjunctiveQuery cq = null;
67 try {
68 cq = new ConjunctiveQueryParser(toString()).parse();
69 } catch (FileNotFoundException e) {
70 // TODO Auto-generated catch block
71 e.printStackTrace();
72 } catch (IllegalInputQueryException e) {
73 // TODO Auto-generated catch block
74 e.printStackTrace();
75 } catch (IOException e) {
76 // TODO Auto-generated catch block
77 e.printStackTrace();
78 } catch (Exception e) {
79 Utility.logDebug("The query cannot be properly handled by KARMA.");
80 return null;
81 }
82 return cq;
83 }
84
85 @Override
86 public String toString() {
87 return queryBuffer.toString();
88 }
89
90 static String sample = "prefix P0: <http://swat.cse.lehigh.edu/onto/univ-bench.owl#>, " +
91 "prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>, " +
92 "prefix owl: <http://www.w3.org/2002/07/owl#>" +
93 "q(?0) <- owl:Thing(?0), P0:Person(?0)";
94
95}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java
deleted file mode 100644
index 98f0c35..0000000
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java
+++ /dev/null
@@ -1,109 +0,0 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import org.semanticweb.karma2.MyKarma;
4import org.semanticweb.karma2.clausifier.OntologyProcesser;
5import org.semanticweb.karma2.exception.IllegalInputOntologyException;
6import org.semanticweb.karma2.model.ConjunctiveQuery;
7import org.semanticweb.owlapi.model.OWLOntology;
8import uk.ac.ox.cs.JRDFox.JRDFStoreException;
9import uk.ac.ox.cs.JRDFox.store.DataStore;
10import uk.ac.ox.cs.pagoda.query.AnswerTuple;
11import uk.ac.ox.cs.pagoda.query.AnswerTuples;
12import uk.ac.ox.cs.pagoda.query.AnswerTuplesImp;
13import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper;
14import uk.ac.ox.cs.pagoda.util.Utility;
15import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
16
17import java.io.File;
18import java.io.FileNotFoundException;
19import java.nio.file.Paths;
20import java.util.Set;
21
22public class KarmaQueryEngine extends RDFoxQueryEngine {
23
24 String karmaDataFile = null, karmaRuleFile = null;
25 private MyKarma reasoner = null;
26
27 public KarmaQueryEngine(String name) {
28 super(name);
29
30// int Base = 1 << 6;
31// int index = (new Random().nextInt() % Base + Base) % Base;
32// karmaDataFile = "karma_data" + index + ".ttl";
33// karmaRuleFile = "karma_rule" + index + ".dlog";
34 karmaDataFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_data.ttl").toString();
35 karmaRuleFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_rule.dlog").toString();
36
37 reasoner = new MyKarma();
38 }
39
40 public MyKarma getReasoner() {
41 if(isDisposed()) throw new DisposedException();
42 return reasoner;
43 }
44
45 public void processOntology(OWLOntology elhoOntology) {
46 if(isDisposed()) throw new DisposedException();
47 try {
48 OntologyProcesser.transformOntology(elhoOntology, new File(karmaDataFile), new File(karmaRuleFile));
49 } catch(IllegalInputOntologyException e) {
50 e.printStackTrace();
51 }
52 }
53
54 @Override
55 public void dispose() {
56 super.dispose();
57 reasoner.dispose();
58 }
59
60 @Override
61 public AnswerTuples evaluate(String queryText) {
62 if(isDisposed()) throw new DisposedException();
63 return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], null);
64 }
65
66 @Override
67 public AnswerTuples evaluate(String queryText, String[] answerVars) {
68 if(isDisposed()) throw new DisposedException();
69 return evaluate(queryText, answerVars, null);
70 }
71
72 public AnswerTuples evaluate(String queryText, AnswerTuples soundAnswerTuples) {
73 if(isDisposed()) throw new DisposedException();
74 return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], soundAnswerTuples);
75 }
76
77 public AnswerTuples evaluate(String queryText, String[] answerVars, AnswerTuples soundAnswerTuples) {
78 if(isDisposed()) throw new DisposedException();
79 KarmaQuery karmaQuery = new KarmaQuery(queryText.replace("_:", "?"));
80 reasoner.setConcurrence(false);
81 ConjunctiveQuery cq = karmaQuery.getConjunctiveQuery();
82 if(cq == null) return null;
83 Set<AnswerTuple> answers = reasoner.answerCQ(cq, soundAnswerTuples, !queryText.contains("_:"));
84 return new AnswerTuplesImp(answerVars, answers);
85 }
86
87 @Override
88 public DataStore getDataStore() {
89 if(isDisposed()) throw new DisposedException();
90 return reasoner.getStore();
91 }
92
93 public void initialiseKarma() {
94 if(isDisposed()) throw new DisposedException();
95 try {
96 reasoner.initializeData(new File(karmaDataFile));
97 reasoner.materialise(new File(karmaRuleFile));
98
99 File tmp;
100 if(karmaDataFile != null && ((tmp = new File(karmaDataFile)).exists())) tmp.delete();
101 if(karmaRuleFile != null && ((tmp = new File(karmaRuleFile)).exists())) tmp.delete();
102 } catch(FileNotFoundException e) {
103 e.printStackTrace();
104 } catch(JRDFStoreException e) {
105 e.printStackTrace();
106 }
107 }
108
109}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java
deleted file mode 100644
index f823232..0000000
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java
+++ /dev/null
@@ -1,117 +0,0 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import org.semanticweb.HermiT.model.Constant;
4import org.semanticweb.HermiT.model.Individual;
5import org.semanticweb.HermiT.model.Term;
6import uk.ac.ox.cs.JRDFox.JRDFStoreException;
7import uk.ac.ox.cs.JRDFox.model.GroundTerm;
8import uk.ac.ox.cs.JRDFox.store.TupleIterator;
9import uk.ac.ox.cs.pagoda.query.AnswerTuple;
10import uk.ac.ox.cs.pagoda.query.AnswerTuples;
11import uk.ac.ox.cs.pagoda.util.Utility;
12import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
13
14public class RDFoxAnswerTuples extends AnswerTuples {
15
16 long multi;
17 TupleIterator m_iter;
18 String[] m_answerVars;
19
20 public RDFoxAnswerTuples(String[] answerVars, TupleIterator iter) {
21 m_answerVars = answerVars;
22 m_iter = iter;
23 reset();
24 }
25
26 public static Term getHermitTerm(GroundTerm t) {
27 if(t instanceof uk.ac.ox.cs.JRDFox.model.Individual) {
28 uk.ac.ox.cs.JRDFox.model.Individual individual = (uk.ac.ox.cs.JRDFox.model.Individual) t;
29 return Individual.create(individual.getIRI());
30 }
31 else {
32 uk.ac.ox.cs.JRDFox.model.Literal literal = ((uk.ac.ox.cs.JRDFox.model.Literal) t);
33 return Constant.create(literal.getLexicalForm(), literal.getDatatype().getIRI());
34 }
35 }
36
37 @Override
38 public boolean isValid() {
39 if(isDisposed()) throw new DisposedException();
40
41 return multi != 0;
42 }
43
44 @Override
45 public int getArity() {
46 if(isDisposed()) throw new DisposedException();
47
48 try {
49 return m_iter.getArity();
50 } catch (JRDFStoreException e) {
51 e.printStackTrace();
52 return -1;
53 }
54 }
55
56 @Override
57 public void moveNext() {
58 if(isDisposed()) throw new DisposedException();
59
60 try {
61 multi = m_iter.getNext();
62 } catch (JRDFStoreException e) {
63 e.printStackTrace();
64 }
65 }
66
67 @Override
68 public void dispose() {
69 super.dispose();
70 m_iter.dispose();
71 }
72
73 @Override
74 public AnswerTuple getTuple() {
75 if(isDisposed()) throw new DisposedException();
76
77 return new AnswerTuple(m_iter, m_answerVars.length);
78 }
79
80 @Override
81 public void reset() {
82 if(isDisposed()) throw new DisposedException();
83
84 try {
85 multi = m_iter.open();
86 } catch (JRDFStoreException e) {
87 e.printStackTrace();
88 }
89 }
90
91 @Override
92 public boolean contains(AnswerTuple t) {
93 if(isDisposed()) throw new DisposedException();
94
95 Utility.logError("Unsupported operation in RDFoxAnswerTuples");
96 return false;
97 }
98
99 @Override
100 public void remove() {
101 if(isDisposed()) throw new DisposedException();
102
103 Utility.logError("Unsupported operation in RDFoxAnswerTuples");
104 }
105
106 @Override
107 public String[] getAnswerVariables() {
108 if(isDisposed()) throw new DisposedException();
109
110 return m_answerVars;
111 }
112
113 protected void finalize() {
114 m_iter.dispose();
115 }
116
117}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
deleted file mode 100644
index 8b22919..0000000
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
+++ /dev/null
@@ -1,138 +0,0 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import org.semanticweb.owlapi.model.OWLOntology;
4import org.semanticweb.owlapi.model.OWLOntologyCreationException;
5import uk.ac.ox.cs.JRDFox.JRDFStoreException;
6import uk.ac.ox.cs.JRDFox.Prefixes;
7import uk.ac.ox.cs.JRDFox.store.DataStore;
8import uk.ac.ox.cs.JRDFox.store.DataStore.StoreType;
9import uk.ac.ox.cs.pagoda.MyPrefixes;
10import uk.ac.ox.cs.pagoda.query.AnswerTuples;
11import uk.ac.ox.cs.pagoda.reasoner.QueryEngine;
12import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner;
13import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter;
14import uk.ac.ox.cs.pagoda.util.Timer;
15import uk.ac.ox.cs.pagoda.util.Utility;
16import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
17
18import java.io.File;
19import java.util.Collection;
20
21public abstract class RDFoxQueryEngine extends QueryEngine {
22
23 public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2;
24 protected String name;
25 protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes();
26
27 public RDFoxQueryEngine(String name) {
28 this.name = name;
29 }
30
31 public static DataStore createDataStore() {
32 DataStore instance = null;
33 try {
34// instance = new DataStore("par-head-n");
35 instance = new DataStore(StoreType.NarrowParallelHead);
36 instance.setNumberOfThreads(matNoOfThreads);
37 instance.initialize();
38 } catch(JRDFStoreException e) {
39 e.printStackTrace();
40 }
41 return instance;
42 }
43
44 public String getName() {
45 if(isDisposed()) throw new DisposedException();
46 return name;
47 }
48
49 public abstract DataStore getDataStore();
50
51 public void importRDFData(String fileName, String importedFile) {
52 if(isDisposed()) throw new DisposedException();
53 if(importedFile == null || importedFile.isEmpty()) return;
54 Timer t = new Timer();
55 DataStore store = getDataStore();
56 try {
57 long oldTripleCount = store.getTriplesCount(), tripleCount;
58 for(String file : importedFile.split(QueryReasoner.ImportDataFileSeparator)) {
59 store.importTurtleFile(new File(file), prefixes);
60 }
61 tripleCount = store.getTriplesCount();
62 Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)");
63 store.clearRulesAndMakeFactsExplicit();
64 } catch(JRDFStoreException e) {
65 e.printStackTrace();
66 }
67 Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds.");
68 }
69
70 public void importDataFromABoxOf(OWLOntology ontology) {
71 if(isDisposed()) throw new DisposedException();
72 DataStore store = getDataStore();
73 try {
74 long prevTriplesCount = store.getTriplesCount();
75 store.importOntology(ontology.getOWLOntologyManager().createOntology(ontology.getABoxAxioms(true)));
76 long loadedTriples = store.getTriplesCount() - prevTriplesCount;
77 Utility.logDebug(name + ": loaded " + loadedTriples + " triples from " + ontology.getABoxAxioms(true)
78 .size() + " ABox axioms");
79 } catch(JRDFStoreException | OWLOntologyCreationException e) {
80 e.printStackTrace();
81 System.exit(1);
82 }
83
84 }
85
86 public void materialise(String programName, String programText) {
87 if(isDisposed()) throw new DisposedException();
88 if(programText == null) return;
89 Timer t = new Timer();
90 DataStore store = getDataStore();
91 try {
92 long oldTripleCount = store.getTriplesCount(), tripleCount;
93// store.addRules(new String[] {programText});
94 store.importRules(programText);
95 store.applyReasoning();
96 tripleCount = store.getTriplesCount();
97 Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)");
98 store.clearRulesAndMakeFactsExplicit();
99 } catch(JRDFStoreException e) {
100 e.printStackTrace();
101 }
102 Utility.logDebug(name + " store finished the materialisation of " + programName + " in " + t.duration() + " seconds.");
103 }
104
105 @Override
106 public void evaluate(Collection<String> queryTexts, String answerFile) {
107 if(isDisposed()) throw new DisposedException();
108 if(queryTexts == null)
109 return;
110
111 int queryID = 0;
112 AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile);
113 AnswerTuples answerTuples;
114 Timer t = new Timer();
115 try {
116 for(String query : queryTexts) {
117 t.reset();
118 answerTuples = null;
119 try {
120 answerTuples = evaluate(query);
121 Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration());
122 answerWriter.write(answerTuples.getAnswerVariables(), answerTuples);
123 } finally {
124 if(answerTuples != null) answerTuples.dispose();
125 }
126 }
127 } finally {
128 answerWriter.close();
129 }
130
131 Utility.logDebug("done computing query answers by RDFox.");
132 }
133
134 @Override
135 public void dispose() {
136 super.dispose();
137 }
138}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
deleted file mode 100644
index 62885be..0000000
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
+++ /dev/null
@@ -1,269 +0,0 @@
1package uk.ac.ox.cs.pagoda.reasoner.light;
2
3import net.sf.ehcache.Cache;
4import net.sf.ehcache.CacheManager;
5import net.sf.ehcache.Element;
6import org.semanticweb.HermiT.model.*;
7import uk.ac.ox.cs.JRDFox.JRDFStoreException;
8import uk.ac.ox.cs.JRDFox.model.Datatype;
9import uk.ac.ox.cs.JRDFox.model.GroundTerm;
10import uk.ac.ox.cs.JRDFox.store.DataStore;
11import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
12import uk.ac.ox.cs.JRDFox.store.Dictionary;
13import uk.ac.ox.cs.JRDFox.store.Resource;
14import uk.ac.ox.cs.pagoda.owl.OWLHelper;
15import uk.ac.ox.cs.pagoda.util.Namespace;
16
17import java.util.Collection;
18import java.util.HashMap;
19import java.util.Map;
20
21public class RDFoxTripleManager {
22
23 private final Cache termsCache;
24 private static final int TERMS_CACHE_SIZE = 10000;
25 private static final int CACHE_TTL_DEFAULT = 0;
26 private static final int CACHE_TTI_DEFAULT = 0;
27 private static final boolean CACHE_ETERNAL = true;
28 private static final boolean CACHE_USE_DISK = false;
29
30 UpdateType m_incrementally;
31// boolean m_incrementally;
32
33 DataStore m_store;
34 Dictionary m_dict;
35// Set<Atom> triplesByTerm = new HashSet<Atom>();
36
37 public RDFoxTripleManager(DataStore store, boolean incrementally) {
38 m_store = store;
39// m_incrementally = incrementally;
40
41 CacheManager cacheManager = CacheManager.getInstance();
42 String cacheName = "RDFoxTripleManager_" + store.hashCode();
43 if(! cacheManager.cacheExists(cacheName)) {
44 termsCache = new Cache(cacheName,
45 TERMS_CACHE_SIZE, CACHE_USE_DISK, CACHE_ETERNAL,
46 CACHE_TTL_DEFAULT, CACHE_TTI_DEFAULT);
47 cacheManager.addCache(termsCache);
48 }
49 else
50 termsCache = cacheManager.getCache(cacheName);
51
52 if (incrementally)
53 m_incrementally = UpdateType.ScheduleForAddition;
54 else
55 m_incrementally = UpdateType.Add;
56
57 try {
58 m_dict = store.getDictionary();
59 resourceID = m_dict.resolveResources(
60 new String[] {Namespace.RDF_TYPE, Namespace.EQUALITY, Namespace.INEQUALITY},
61 new int[] {Datatype.IRI_REFERENCE.value(), Datatype.IRI_REFERENCE.value(), Datatype.IRI_REFERENCE.value()}
62 );
63 } catch (JRDFStoreException e) {
64 e.printStackTrace();
65 }
66 }
67
68 public boolean isRdfTypeID(int id) {
69 return id == resourceID[0];
70 }
71
72 public void addTripleByID(int[] tuple) {
73// System.out.println(getRawTerm(tuple[0]) + " " + getRawTerm(tuple[1]) + " " + getRawTerm(tuple[2]) + " .");
74 try {
75// Resource[] rsc = new Resource[3];
76// m_dict.getResources(tuple, 0, 3, rsc);
77//
78// GroundTerm[] terms = new GroundTerm[3];
79// for (int i = 0; i < 3; ++i)
80// terms[i] = uk.ac.ox.cs.JRDFox.model.Individual.create(rsc[i].m_lexicalForm);
81// m_store.addTriples(terms, m_incrementally);
82
83 m_store.addTriplesByResourceIDs(tuple, m_incrementally);
84 } catch (JRDFStoreException e) {
85 e.printStackTrace();
86 }
87 }
88
89 public void addTripleByTerm(Atom atom) {
90 try {
91 m_store.addTriples(getRDFoxTriple(atom), m_incrementally);
92 } catch (JRDFStoreException e) {
93 e.printStackTrace();
94 }
95 }
96
97 public void removeTripleByTermIncrementally(Atom atom) {
98 try {
99 m_store.addTriples(getRDFoxTriple(atom), UpdateType.ScheduleForDeletion);
100 } catch (JRDFStoreException e) {
101 e.printStackTrace();
102 }
103 }
104
105 public static GroundTerm[] getRDFoxTriple(Atom instance) {
106 if (instance.getArity() == 1)
107 return new GroundTerm[] {
108 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
109 uk.ac.ox.cs.JRDFox.model.Individual.RDF_TYPE,
110 uk.ac.ox.cs.JRDFox.model.Individual.create(((AtomicConcept) instance.getDLPredicate()).getIRI()) };
111 else if (instance.getDLPredicate() instanceof Equality || instance.getDLPredicate() instanceof AnnotatedEquality)
112 return new GroundTerm[] {
113 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
114 uk.ac.ox.cs.JRDFox.model.Individual.SAME_AS,
115 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) };
116 else if (instance.getDLPredicate() instanceof Inequality)
117 return new GroundTerm[] {
118 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
119 uk.ac.ox.cs.JRDFox.model.Individual.DIFFERENT_FROM,
120 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) };
121 else
122 return new GroundTerm[] {
123 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()),
124 uk.ac.ox.cs.JRDFox.model.Individual.create(((AtomicRole) instance.getDLPredicate()).getIRI()),
125 uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) };
126 }
127
128 int[] resourceID; // rdf:type, owl:sameAs, owl:differentFrom
129
130 public int[] getInstance(Atom atom, Map<Variable, Integer> assignment) {
131 DLPredicate p = atom.getDLPredicate();
132 if (p instanceof Equality || p instanceof AnnotatedEquality)
133 return new int[] {
134 getResourceID(atom.getArgument(0), assignment),
135 resourceID[1],
136 getResourceID(atom.getArgument(1), assignment)
137 };
138 else if (p instanceof Inequality)
139 return new int[] {
140 getResourceID(atom.getArgument(0), assignment),
141 resourceID[2],
142 getResourceID(atom.getArgument(1), assignment)
143 };
144 else if (atom.getArity() == 1)
145 return new int[] {
146 getResourceID(atom.getArgument(0), assignment),
147 resourceID[0],
148 getResourceID(p)
149 };
150 else
151 return new int[] {
152 getResourceID(atom.getArgument(0), assignment),
153 getResourceID(p),
154 getResourceID(atom.getArgument(1), assignment)
155 };
156 }
157
158 public String getRawTerm(int id) {
159 Resource[] res = new Resource[1];
160 try {
161 m_dict.getResources(new int[] {id}, 0, 1, res);
162 } catch (JRDFStoreException e) {
163 e.printStackTrace();
164 }
165 return getQuotedTerm(res[0]);
166 }
167
168 Map<String, Integer> predicateCache = new HashMap<String, Integer>();
169
170 public int getResourceID(DLPredicate p) {
171 Integer id;
172 String name = p instanceof AtomicConcept ? ((AtomicConcept) p).getIRI() : ((AtomicRole) p).getIRI();
173 if ((id = predicateCache.get(name)) != null) return id;
174 try {
175 predicateCache.put(name, id = resolveResource(name, Datatype.IRI_REFERENCE.value()));
176
177 } catch (JRDFStoreException e) {
178 e.printStackTrace();
179 }
180 return id;
181 }
182
183 public int getResourceID(String name) {
184 Integer id = null;
185 try {
186 id = resolveResource(name, Datatype.IRI_REFERENCE.value());
187 } catch (JRDFStoreException e) {
188 e.printStackTrace();
189 }
190 return id;
191 }
192
193 private int resolveResource(String name, int type) throws JRDFStoreException {
194 String[] lexicalForms = new String[] {name};
195 int[] types = new int[] {type};
196 return m_dict.resolveResources(lexicalForms, types)[0];
197 }
198
199// Map<Term, Integer> termCache = new HashMap<Term, Integer>();
200// Queue<Term> termQueue = new LinkedList<Term>();
201
202 private int getResourceID(Term arg, Map<Variable, Integer> assignment) {
203 if (arg instanceof Variable) return assignment.get(arg);
204 int id = -1;
205 if(termsCache.isKeyInCache(arg))
206 return ((int) termsCache.get(arg).getObjectValue());
207
208// if (arg instanceof Individual) {
209 try {
210 if (arg instanceof Individual)
211 termsCache.put(new Element(arg, id = resolveResource(((Individual) arg).getIRI(), Datatype.IRI_REFERENCE.value())));
212 else if (arg instanceof Constant)
213 termsCache.put(new Element(arg, id = resolveResource(((Constant) arg).getLexicalForm(), getDatatypeID(((Constant) arg).getDatatypeURI()))));
214
215 } catch (JRDFStoreException e) {
216 e.printStackTrace();
217 System.exit(1);
218 }
219// }
220
221 return id;
222 }
223
224 private static int getDatatypeID(String uri) {
225 if (uri.equals("http://www.w3.org/2001/XMLSchema#string")) return Datatype.XSD_STRING.value();
226 if (uri.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#PlainLiteral")) return Datatype.RDF_PLAIN_LITERAL.value();
227 if (uri.equals("http://www.w3.org/2001/XMLSchema#integer")) return Datatype.XSD_INTEGER.value();
228 if (uri.equals("http://www.w3.org/2001/XMLSchema#float")) return Datatype.XSD_FLOAT.value();
229 if (uri.equals("http://www.w3.org/2001/XMLSchema#double")) return Datatype.XSD_DOUBLE.value();
230 if (uri.equals("http://www.w3.org/2001/XMLSchema#boolean")) return Datatype.XSD_BOOLEAN.value();
231 if (uri.equals("http://www.w3.org/2001/XMLSchema#dateTime")) return Datatype.XSD_DATE_TIME.value();
232 if (uri.equals("http://www.w3.org/2001/XMLSchema#time")) return Datatype.XSD_TIME.value();
233 if (uri.equals("http://www.w3.org/2001/XMLSchema#date")) return Datatype.XSD_DATE.value();
234 if (uri.equals("http://www.w3.org/2001/XMLSchema#gYearMonth")) return Datatype.XSD_G_YEAR_MONTH.value();
235 if (uri.equals("http://www.w3.org/2001/XMLSchema#gYear")) return Datatype.XSD_G_YEAR.value();
236 if (uri.equals("http://www.w3.org/2001/XMLSchema#gMonthDay")) return Datatype.XSD_G_MONTH_DAY.value();
237 if (uri.equals("http://www.w3.org/2001/XMLSchema#gDay")) return Datatype.XSD_G_DAY.value();
238 if (uri.equals("http://www.w3.org/2001/XMLSchema#gMonth")) return Datatype.XSD_G_MONTH.value();
239 if (uri.equals("http://www.w3.org/2001/XMLSchema#duration")) return Datatype.XSD_DURATION.value();
240
241 return -1;
242 }
243
244 public int[] getResourceIDs(Collection<uk.ac.ox.cs.JRDFox.model.Individual> individuals) {
245 String[] str = new String[individuals.size()];
246 int[] types = new int[individuals.size()];
247 int index = 0;
248 for (uk.ac.ox.cs.JRDFox.model.Individual individual : individuals) {
249 types[index] = Datatype.IRI_REFERENCE.value();
250 str[index++] = individual.getIRI();
251 }
252
253 try {
254 return m_dict.resolveResources(str, types);
255 } catch (JRDFStoreException e) {
256 e.printStackTrace();
257 return null;
258 }
259 }
260
261 public static String getQuotedTerm(Resource r) {
262 if (r.m_datatype.equals(Datatype.IRI_REFERENCE))
263 return OWLHelper.addAngles(r.m_lexicalForm);
264 if (r.m_datatype.equals(Datatype.XSD_STRING) || r.m_datatype.equals(Datatype.RDF_PLAIN_LITERAL))
265 return "\"" + r.m_lexicalForm + "\"";
266 else
267 return "\"" + r.m_lexicalForm + "\"^^<" + r.m_datatype.getIRI() + ">";
268 }
269}