aboutsummaryrefslogtreecommitdiff
path: root/src/main/java/org/semanticweb/karma2/MyKarma.java
diff options
context:
space:
mode:
Diffstat (limited to 'src/main/java/org/semanticweb/karma2/MyKarma.java')
-rw-r--r--src/main/java/org/semanticweb/karma2/MyKarma.java469
1 files changed, 469 insertions, 0 deletions
diff --git a/src/main/java/org/semanticweb/karma2/MyKarma.java b/src/main/java/org/semanticweb/karma2/MyKarma.java
new file mode 100644
index 0000000..b2b4352
--- /dev/null
+++ b/src/main/java/org/semanticweb/karma2/MyKarma.java
@@ -0,0 +1,469 @@
1package org.semanticweb.karma2;
2
3import java.io.File;
4import java.io.FileNotFoundException;
5import java.util.Collection;
6import java.util.HashMap;
7import java.util.HashSet;
8import java.util.Iterator;
9import java.util.Map;
10import java.util.Scanner;
11import java.util.Set;
12import java.util.concurrent.Callable;
13import java.util.concurrent.ExecutionException;
14import java.util.concurrent.ExecutorService;
15import java.util.concurrent.Executors;
16import java.util.concurrent.Future;
17
18import org.jgrapht.DirectedGraph;
19import org.jgrapht.alg.CycleDetector;
20import org.jgrapht.graph.DefaultDirectedGraph;
21import org.jgrapht.graph.DefaultEdge;
22import org.semanticweb.HermiT.model.Atom;
23import org.semanticweb.HermiT.model.Individual;
24import org.semanticweb.HermiT.model.Term;
25import org.semanticweb.karma2.exception.ConstraintException;
26import org.semanticweb.karma2.model.ConjunctiveQuery;
27import org.semanticweb.karma2.model.ExtendedConjunctiveQuery;
28
29import uk.ac.ox.cs.JRDFox.model.GroundTerm;
30import uk.ac.ox.cs.JRDFox.store.DataStore;
31import uk.ac.ox.cs.JRDFox.store.Parameters;
32import uk.ac.ox.cs.JRDFox.Prefixes;
33import uk.ac.ox.cs.JRDFox.JRDFStoreException;
34import uk.ac.ox.cs.JRDFox.store.TupleIterator;
35import uk.ac.ox.cs.pagoda.MyPrefixes;
36import uk.ac.ox.cs.pagoda.query.AnswerTuple;
37import uk.ac.ox.cs.pagoda.query.AnswerTuples;
38import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine;
39import uk.ac.ox.cs.pagoda.util.Namespace;
40import uk.ac.ox.cs.pagoda.util.Timer;
41import uk.ac.ox.cs.pagoda.util.UFS;
42import uk.ac.ox.cs.pagoda.util.Utility;
43
/**
 * Query-answering engine over an RDFox {@link DataStore}: it materialises a
 * datalog program, evaluates conjunctive queries, and filters out spurious
 * matches (those relying on auxiliary/anonymous individuals or violating
 * equality constraints).
 */
public class MyKarma {

    // Underlying RDFox store holding the data and materialised consequences.
    private DataStore store;

    private Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes();
    private Parameters parameters = new Parameters();

    /**
     * Creates a fresh data store (via RDFoxQueryEngine) and enables the
     * bushy/all-answers-in-root query-plan options by default.
     */
    public MyKarma() {
        store = RDFoxQueryEngine.createDataStore();
        parameters.m_allAnswersInRoot = true;
        parameters.m_useBushy = true;
    }
56
    // Union-find over individual IRIs, grouping individuals made equal by
    // owl:sameAs-style equality facts. Lazily built by computeEqualityGroups().
    private UFS<String> equalityGroups = null;

    /**
     * Builds {@link #equalityGroups} by querying the store for all equality
     * pairs and merging each pair in the union-find structure.
     * Idempotent: returns immediately once the groups have been computed.
     */
    public void computeEqualityGroups() {
        if (equalityGroups != null) return ;
        equalityGroups = new UFS<String>();
        TupleIterator answers = null;
        try {
            Timer t = new Timer();
            answers = store.compileQuery("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + "?z . }", prefixes, parameters);
            // RDFox iteration protocol: open() positions on the first tuple,
            // getNext() advances; a multiplicity of 0 marks exhaustion.
            for (long multi = answers.open(); multi != 0; multi = answers.getNext()) {
                // Skip the reflexive x sameAs x pairs.
                if (answers.getResourceID(0) != answers.getResourceID(1))
                    equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm);
            }
            Utility.logInfo("@Time to group individuals by equality: " + t.duration());
        } catch (JRDFStoreException e) {
            e.printStackTrace();
        } finally {
            // Always release the native iterator.
            if (answers != null) answers.dispose();
        }
    }
77
    /** Returns the underlying RDFox data store (shared, not a copy). */
    public DataStore getStore() {
        return store;
    }
81
    /**
     * Returns the number of triples currently in the store (including
     * materialised consequences, once reasoning has been applied).
     */
    public long getNumberOfFacts() throws JRDFStoreException {
        return store.getTriplesCount();
    }
85
    /**
     * Imports the given Turtle file into the store using the engine's prefixes.
     *
     * @param dataFile Turtle file with the input data
     */
    public void initializeData(File dataFile) throws JRDFStoreException,
            FileNotFoundException {
        store.importTurtleFile(dataFile, prefixes);
    }
90
91 public void materialise(File ruleFile) throws JRDFStoreException, FileNotFoundException {
92 Timer t = new Timer();
93 Scanner scanner = new Scanner(ruleFile);
94 String datalogProgram = scanner.useDelimiter("\\Z").next();
95 scanner.close();
96 store.clearRulesAndMakeFactsExplicit();
97// store.addRules(new String[] {datalogProgram});
98 store.importRules(datalogProgram);
99 store.applyReasoning();
100 Utility.logDebug("elho-lower-store finished its own materialisation in " + t.duration() + " seconds.");
101 }
102
    /**
     * Answers the conjunctive query without a set of already-known sound
     * answers; delegates to {@link #answerCQ(ConjunctiveQuery, AnswerTuples, boolean)}.
     */
    public Collection<AnswerTuple> answerCQ(ConjunctiveQuery q, boolean isGround) {
        return answerCQ(q, null, isGround);
    }
106
    // When true, spuriousness checks for candidate answers run on a thread
    // pool; otherwise they run sequentially on the caller's thread.
    boolean m_multiThread = false;

    /** Enables or disables multi-threaded answer checking. */
    public void setConcurrence(boolean multiThread) {
        this.m_multiThread = multiThread;
    }
112
113 public Set<AnswerTuple> answerCQ(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) {
114 computeEqualityGroups();
115 if (m_multiThread)
116 return answerCQ_multiThread(q, soundAnswerTuples, isGround);
117 else
118 return answerCQ_singleThread(q, soundAnswerTuples, isGround);
119 }
120
121 private Set<AnswerTuple> answerCQ_multiThread(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) {
122 Set<Future<AnswerTuple>> set = new HashSet<Future<AnswerTuple>>();
123 ExtendedConjunctiveQuery qext = ExtendedConjunctiveQuery.computeExtension(q);
124 TupleIterator tupleIterator = null;
125 ExecutorService es = null;
126 try {
127 tupleIterator = store.compileQuery(qext.toString(), prefixes, parameters);
128 es = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
129 AnswerTuple tuple;
130 for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) {
131 Map<Term, GroundTerm> match = new HashMap<Term, GroundTerm>();
132 for (int i = 0; i < qext.getNumberOfAnswerTerms(); i++) {
133 match.put(qext.getAnswerTerm(i), tupleIterator.getGroundTerm(i));
134 }
135 if ((tuple = contains(qext, soundAnswerTuples, match)) != null)
136 set.add(es.submit(new Spurious(qext, match, tuple, isGround)));
137 }
138 Set<AnswerTuple> result = new HashSet<AnswerTuple>(set.size());
139 while(!set.isEmpty()) {
140 Iterator<Future<AnswerTuple>> it = set.iterator();
141 while(it.hasNext()) {
142 Future<AnswerTuple> isReady = it.next();
143 if (isReady.isDone()) {
144 try {
145 tuple = isReady.get();
146 if (tuple != null)
147 result.add(tuple);
148 it.remove();
149 } catch (InterruptedException e) {
150 e.printStackTrace();
151 } catch (ExecutionException e) {
152 e.printStackTrace();
153 }
154 }
155 }
156 }
157 return result;
158 } catch (JRDFStoreException e1) {
159 e1.printStackTrace();
160 return null;
161 } finally {
162 if (tupleIterator != null) tupleIterator.dispose();
163 if (es != null) es.shutdown();
164 }
165 }
166
    /**
     * Single-threaded variant: evaluates the extended query and checks each
     * candidate match for spuriousness inline via {@link Spurious#call()}.
     *
     * @return the verified answer tuples, or {@code null} on store failure
     */
    private Set<AnswerTuple> answerCQ_singleThread(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) {
        ExtendedConjunctiveQuery qext = ExtendedConjunctiveQuery.computeExtension(q);
        // Save the plan options so they can be restored in the finally block.
        boolean useBushyValue = parameters.m_useBushy, allAnswersInRootValue = parameters.m_allAnswersInRoot;
        Set<AnswerTuple> result = new HashSet<AnswerTuple>();

        TupleIterator tupleIterator = null;
        try {
            tupleIterator = store.compileQuery(qext.toString(), prefixes, parameters);
            // NOTE(review): the options are disabled only AFTER compileQuery,
            // so this compilation still uses the saved values; presumably the
            // toggle targets queries compiled elsewhere while this iteration
            // runs — confirm before reordering.
            parameters.m_useBushy = false;
            parameters.m_allAnswersInRoot = false;

            AnswerTuple tuple;

            // RDFox iteration protocol: open() then getNext(); 0 = exhausted.
            for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) {
                Map<Term, GroundTerm> match = new HashMap<Term, GroundTerm>();
                for (int i = 0; i < qext.getNumberOfAnswerTerms(); i++) {
                    match.put(qext.getAnswerTerm(i), tupleIterator.getGroundTerm(i));
                }
                // Keep the tuple only if it is not already known sound AND the
                // spuriousness check confirms it as a real answer.
                if (((tuple = contains(qext, soundAnswerTuples, match)) != null) && (new Spurious(qext, match, tuple, isGround).call()) != null)
                    result.add(tuple);
            }
        } catch (JRDFStoreException e) {
            e.printStackTrace();
            return null;
        } finally {
            if (tupleIterator != null) tupleIterator.dispose();
            // Restore the caller-visible plan options.
            parameters.m_useBushy = useBushyValue;
            parameters.m_allAnswersInRoot = allAnswersInRootValue;
        }
        return result;
    }
198
199 private AnswerTuple contains(ExtendedConjunctiveQuery qext, AnswerTuples answerTuples, Map<Term, GroundTerm> match) {
200 GroundTerm[] terms = new GroundTerm[qext.getNumberOfRealAnswerTerms()];
201 int index = 0;
202 for (Term t : qext.getRealAnswerTerms())
203 terms[index++] = match.get(t);
204 AnswerTuple tuple = new AnswerTuple(terms);
205 if (answerTuples != null && answerTuples.contains(tuple)) return null;
206 return tuple;
207 }
208
209
210 class Spurious implements Callable<AnswerTuple> {
211 private ExtendedConjunctiveQuery query;
212 private Map<Term, GroundTerm> match;
213 private AnswerTuple tuple;
214 private boolean isGround;
215
216 public Spurious(ExtendedConjunctiveQuery query, Map<Term, GroundTerm> m, AnswerTuple t, boolean isGround) {
217 this.query = query;
218 this.match = m;
219 this.tuple = t;
220 this.isGround = isGround;
221 }
222
223 public AnswerTuple call() {
224 if (isMappingAnswerVariablesToAuxiliary(query, match));
225 else {
226 if (isGround) return tuple;
227
228 EqualityConstraintRelation sim = new EqualityConstraintRelation(query, match);
229 try {
230 sim.computeRelation();
231 if (areEqualityConstraintsSatisfiedByMatch(query, sim, match)
232 && !isCyclic(query, sim, match)) {
233 return tuple;
234 }
235 } catch (ConstraintException e) {
236 Utility.logError(e.toString());
237 e.printStackTrace();
238 return null;
239 }
240 }
241 return null;
242 }
243
244 }
245
246 private boolean isMappingAnswerVariablesToAuxiliary(
247 ExtendedConjunctiveQuery conjunctiveQuery,
248 Map<Term, GroundTerm> match) {
249 for (Term ansQueryTerm : conjunctiveQuery.getRealAnswerTerms()) {
250 if (! (ansQueryTerm instanceof Individual)) {
251 GroundTerm datalog_term = match.get(ansQueryTerm);
252 if (isSyntacticAnonymous(datalog_term))
253 return true;
254 }
255 }
256 return false;
257 }
258
259 private boolean isCyclic(ExtendedConjunctiveQuery q,
260 EqualityConstraintRelation sim, Map<Term, GroundTerm> match) {
261 DirectedGraph<Term, DefaultEdge> auxGraph = new DefaultDirectedGraph<Term, DefaultEdge>(
262 DefaultEdge.class);
263 for (Term queryTerm : q.getTerms()) {
264 if (!(queryTerm instanceof Individual) && isRealAnonymous(match.get(queryTerm)))
265 auxGraph.addVertex(sim.getRepresentative(queryTerm));
266 }
267 for (Atom a : q.getAtoms())
268 if (a.getArity() == 2 && !(a.getArgument(0) instanceof Individual) && !(a.getArgument(1) instanceof Individual))
269 if (isRealAnonymous(match.get(a.getArgument(0))) && isRealAnonymous(match.get(a.getArgument(1))))
270 auxGraph.addEdge(sim.getRepresentative(a.getArgument(0)), sim.getRepresentative(a.getArgument(0)));
271 return (new CycleDetector<Term, DefaultEdge>(auxGraph)).detectCycles();
272
273 }
274
275 private boolean isRealAnonymous(GroundTerm datalog_t) {
276 if (!(datalog_t instanceof uk.ac.ox.cs.JRDFox.model.Individual)) return false;
277 uk.ac.ox.cs.JRDFox.model.Individual ind = (uk.ac.ox.cs.JRDFox.model.Individual) datalog_t;
278 if (!ind.getIRI().startsWith(Namespace.KARMA_ANONY)) return false;
279
280 return equalityGroups.find(ind.getIRI()).contains(Namespace.KARMA_ANONY);
281
282// String query = "select ?x where { ?x <http://www.w3.org/2002/07/owl#sameAs> <" + ind.getIRI() + ">. } ";
283// TupleIterator tupleIterator;
284// try {
285// tupleIterator = store.compileQuery(query, prefixes, parameters);
286// } catch (JRDFStoreException e) {
287// e.printStackTrace();
288// return false;
289// }
290//
291// try {
292// GroundTerm t;
293// for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) {
294// t = tupleIterator.getGroundTerm(0);
295// if (t instanceof uk.ac.ox.cs.JRDFox.model.Individual && !((uk.ac.ox.cs.JRDFox.model.Individual) t).isAnony)
296// return false;
297// }
298// } catch (JRDFStoreException e) {
299// e.printStackTrace();
300// return false;
301// } finally {
302// tupleIterator.dispose();
303// }
304// return true;
305 }
306
307 private boolean areEqualityConstraintsSatisfiedByMatch(
308 ExtendedConjunctiveQuery q, EqualityConstraintRelation sim,
309 Map<Term, GroundTerm> m) throws ConstraintException {
310 for (Term s : q.getTerms())
311 for (Term t : q.getTerms())
312 if (sim.areConstraintToBeEqual(s, t)) {
313 if (!areMappedToEqualDatalogTerms(q, m, s, t))
314 return false;
315 }
316 return true;
317 }
318
319 private boolean areMappedToEqualDatalogTerms(
320 ExtendedConjunctiveQuery q, Map<Term, GroundTerm> match,
321 Term queryTerm1, Term queryTerm2) {
322 GroundTerm datalogTerm1 = (queryTerm1 instanceof Individual) ? toRDFoxIndividual(queryTerm1) : match.get(queryTerm1);
323 GroundTerm datalogTerm2 = (queryTerm2 instanceof Individual) ? toRDFoxIndividual(queryTerm2) : match.get(queryTerm2);
324 if (datalogTerm1 != null && datalogTerm1.equals(datalogTerm2))
325 return true;
326
327 return equalityGroups.find(datalogTerm1.toString()).equals(datalogTerm2.toString());
328// String query = "prefix owl: <http://www.w3.org/2002/07/owl#> select where {"
329// + datalogTerm1
330// + " owl:sameAs "
331// + datalogTerm2
332// + ". } ";
333// TupleIterator tupleIterator;
334// try {
335// tupleIterator = store.compileQuery(query, prefixes, parameters);
336// } catch (JRDFStoreException e) {
337// e.printStackTrace();
338// return false;
339// }
340// boolean res = false;
341// try {
342// res = tupleIterator.open() != 0;
343// } catch (JRDFStoreException e) {
344// e.printStackTrace();
345// return false;
346// } finally {
347// tupleIterator.dispose();
348// }
349// return res;
350 }
351
    /** Converts a HermiT {@link Individual} into the equivalent RDFox individual (same IRI). */
    private GroundTerm toRDFoxIndividual(Term t) {
        return uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) t).getIRI());
    }
355
356 private boolean isSyntacticAnonymous(GroundTerm datalog_t) {
357 if (datalog_t instanceof uk.ac.ox.cs.JRDFox.model.Individual && ((uk.ac.ox.cs.JRDFox.model.Individual) datalog_t).getIRI().startsWith(Namespace.KARMA_ANONY))
358 return true;
359 return false;
360 }
361
362 class EqualityConstraintRelation {
363
364 private ExtendedConjunctiveQuery cq;
365 private Map<Term, GroundTerm> match;
366 private Map<Term, Set<Term>> sim;
367
368 public EqualityConstraintRelation(ExtendedConjunctiveQuery q,
369 Map<Term, GroundTerm> m) {
370 cq = q;
371 match = m;
372 sim = new HashMap<Term, Set<Term>>();
373 }
374
375 public void addSingletonClass(Term t) {
376 Set<Term> eqclass = new HashSet<Term>();
377 eqclass.add(t);
378 sim.put(t, eqclass);
379 }
380
381 public boolean areConstraintToBeEqual(Term s, Term t)
382 throws ConstraintException {
383 Term sRepresentative = getRepresentative(s);
384 Term tRepresentative = getRepresentative(t);
385 if (sRepresentative == null || tRepresentative == null) {
386 throw new ConstraintException("Cannot identify terms " + s
387 + " and " + t);
388 }
389 return sRepresentative.equals(tRepresentative);
390 }
391
392 public void constrainToBeEqual(Term s, Term t)
393 throws ConstraintException {
394 Term sRepresentative = getRepresentative(s);
395 Term tRepresentative = getRepresentative(t);
396 if (sRepresentative == null || tRepresentative == null) {
397 throw new ConstraintException("Cannot identify terms " + s
398 + " and " + t);
399 }
400 if (!sRepresentative.equals(tRepresentative)) {
401 sim.get(sRepresentative).addAll(sim.get(tRepresentative));
402 sim.remove(tRepresentative);
403 }
404 }
405
406 public Term getRepresentative(Term s) {
407 if (sim.containsKey(s))
408 return s;
409 for (Term key : sim.keySet()) {
410 if (sim.get(key).contains(s))
411 return key;
412 }
413 return null;
414 }
415
416 public Set<Term> getEquivalenceClass(Term s) {
417 if (sim.containsKey(s))
418 return sim.get(s);
419 for (Set<Term> eqClass : sim.values()) {
420 if (eqClass.contains(s))
421 return eqClass;
422 }
423 return null;
424 }
425
426 public void deriveForkConstraints() throws ConstraintException {
427 boolean newDerivedConstraints = true;
428 while (newDerivedConstraints) {
429 newDerivedConstraints = false;
430 for (Atom a1 : cq.getAtoms())
431 for (Atom a2 : cq.getAtoms()) {
432 if (a1.getArity() == 2 && a2.getArity() == 2) {
433 GroundTerm term = a1.getArgument(1) instanceof Individual ? toRDFoxIndividual(a1.getArgument(1)) : match.get(a1.getArgument(1));
434 if (areConstraintToBeEqual(a1.getArgument(1), a2.getArgument(1)) && !areConstraintToBeEqual(a1.getArgument(0),a2.getArgument(0))) {
435 if (isRealAnonymous(term)) {
436 constrainToBeEqual(a1.getArgument(0), a2.getArgument(0));
437 newDerivedConstraints = true;
438 }
439 }
440 }
441 }
442 }
443 }
444
445 public void computeRelation() throws ConstraintException {
446 for (Term t : cq.getTerms()) {
447 addSingletonClass(t);
448 }
449 deriveForkConstraints();
450 }
451
452 public String toString() {
453 String res = "";
454 for (Set<Term> terms : this.sim.values()) {
455 res += "[ ";
456 for (Term t : terms)
457 res += t + " ";
458 res += "]\n";
459 }
460 return res;
461 }
462
463 }
464
    /** Releases the underlying RDFox data store and its native resources. */
    public void dispose() {
        store.dispose();
    }

}