path: root/src/org/semanticweb/karma2/MyKarma.java
author     yzhou <yujiao.zhou@gmail.com>  2015-04-21 10:34:27 +0100
committer  yzhou <yujiao.zhou@gmail.com>  2015-04-21 10:34:27 +0100
commit     9ce65c5a963b03ee97fe9cb6c5aa65a3c04a80a8 (patch)
tree       47511c0fb89dccff0db4b5990522e04f294d795b /src/org/semanticweb/karma2/MyKarma.java
parent     b1ac207612ee8b045244253fb94b866104bc34f2 (diff)
download   ACQuA-9ce65c5a963b03ee97fe9cb6c5aa65a3c04a80a8.tar.gz
           ACQuA-9ce65c5a963b03ee97fe9cb6c5aa65a3c04a80a8.zip
initial version
Diffstat (limited to 'src/org/semanticweb/karma2/MyKarma.java')
-rw-r--r--  src/org/semanticweb/karma2/MyKarma.java  483
1 file changed, 483 insertions, 0 deletions
diff --git a/src/org/semanticweb/karma2/MyKarma.java b/src/org/semanticweb/karma2/MyKarma.java
new file mode 100644
index 0000000..60938df
--- /dev/null
+++ b/src/org/semanticweb/karma2/MyKarma.java
@@ -0,0 +1,483 @@
package org.semanticweb.karma2;

import java.io.File;
import java.io.FileNotFoundException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Scanner;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import org.jgrapht.DirectedGraph;
import org.jgrapht.alg.CycleDetector;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import org.semanticweb.HermiT.model.Atom;
import org.semanticweb.HermiT.model.Individual;
import org.semanticweb.HermiT.model.Term;
import org.semanticweb.karma2.exception.ConstraintException;
import org.semanticweb.karma2.model.ConjunctiveQuery;
import org.semanticweb.karma2.model.ExtendedConjunctiveQuery;

import uk.ac.ox.cs.JRDFox.model.GroundTerm;
import uk.ac.ox.cs.JRDFox.store.DataStore;
import uk.ac.ox.cs.JRDFox.store.Parameters;
import uk.ac.ox.cs.JRDFox.Prefixes;
import uk.ac.ox.cs.JRDFox.JRDFStoreException;
import uk.ac.ox.cs.JRDFox.store.TupleIterator;
import uk.ac.ox.cs.pagoda.MyPrefixes;
import uk.ac.ox.cs.pagoda.query.AnswerTuple;
import uk.ac.ox.cs.pagoda.query.AnswerTuples;
import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine;
import uk.ac.ox.cs.pagoda.util.Namespace;
import uk.ac.ox.cs.pagoda.util.Timer;
import uk.ac.ox.cs.pagoda.util.UFS;
import uk.ac.ox.cs.pagoda.util.Utility;

public class MyKarma {

    private DataStore store;

    private Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes();
    private Parameters parameters = new Parameters();

    public MyKarma() {
        store = RDFoxQueryEngine.createDataStore();
        parameters.m_allAnswersInRoot = true;
        parameters.m_useBushy = true;
    }

    private UFS<String> equalityGroups = null;

    // Groups individuals that the store has derived to be equal, so that later
    // checks can compare terms up to equality.
    public void computeEqualityGroups() {
        if (equalityGroups != null) return;
        equalityGroups = new UFS<String>();
        TupleIterator answers = null;
        try {
            Timer t = new Timer();
            answers = store.compileQuery("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + "?z . }", prefixes, parameters);
            for (long multi = answers.open(); multi != 0; multi = answers.getNext()) {
                if (answers.getResourceID(0) != answers.getResourceID(1))
                    equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm);
            }
            Utility.logInfo("@Time to group individuals by equality: " + t.duration());
        } catch (JRDFStoreException e) {
            e.printStackTrace();
        } finally {
            if (answers != null) answers.dispose();
        }
    }

    public DataStore getStore() {
        return store;
    }

    public long getNumberOfFacts() throws JRDFStoreException {
        return store.getTriplesCount();
    }

    public void initializeData(File dataFile) throws JRDFStoreException,
            FileNotFoundException {
        store.importTurtleFile(dataFile, prefixes);
    }

    public void materialise(File ruleFile) throws JRDFStoreException, FileNotFoundException {
        Timer t = new Timer();
        Scanner scanner = new Scanner(ruleFile);
        String datalogProgram = scanner.useDelimiter("\\Z").next();
        scanner.close();
        store.clearRulesAndMakeFactsExplicit();
//      store.addRules(new String[] {datalogProgram});
        store.importRules(datalogProgram);
        store.applyReasoning();
        Utility.logDebug("elho-lower-store finished its own materialisation in " + t.duration() + " seconds.");
    }

    public Collection<AnswerTuple> answerCQ(ConjunctiveQuery q, boolean isGround) {
        return answerCQ(q, null, isGround);
    }

    boolean m_multiThread = false;

    public void setConcurrence(boolean multiThread) {
        this.m_multiThread = multiThread;
    }

    public Set<AnswerTuple> answerCQ(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) {
        computeEqualityGroups();
        if (m_multiThread)
            return answerCQ_multiThread(q, soundAnswerTuples, isGround);
        else
            return answerCQ_singleThread(q, soundAnswerTuples, isGround);
    }

    // Evaluates the extended query over the store and submits one spuriousness
    // check per candidate match to a thread pool, keeping the tuples that survive.
    private Set<AnswerTuple> answerCQ_multiThread(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) {
        Set<Future<AnswerTuple>> set = new HashSet<Future<AnswerTuple>>();
        ExtendedConjunctiveQuery qext = ExtendedConjunctiveQuery.computeExtension(q);
        TupleIterator tupleIterator;
        try {
            tupleIterator = store.compileQuery(qext.toString(), prefixes, parameters);
        } catch (JRDFStoreException e) {
            e.printStackTrace();
            return null;
        }
        ExecutorService es = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        AnswerTuple tuple;
        try {
            try {
                for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) {
                    Map<Term, GroundTerm> match = new HashMap<Term, GroundTerm>();
                    for (int i = 0; i < qext.getNumberOfAnswerTerms(); i++) {
                        match.put(qext.getAnswerTerm(i), tupleIterator.getGroundTerm(i));
                    }
                    if ((tuple = contains(qext, soundAnswerTuples, match)) != null)
                        set.add(es.submit(new Spurious(qext, match, tuple, isGround)));
                }
            } catch (JRDFStoreException e) {
                e.printStackTrace();
                return null;
            } finally {
                tupleIterator.dispose();
            }
            Set<AnswerTuple> result = new HashSet<AnswerTuple>(set.size());
            while (!set.isEmpty()) {
                Iterator<Future<AnswerTuple>> it = set.iterator();
                while (it.hasNext()) {
                    Future<AnswerTuple> isReady = it.next();
                    if (isReady.isDone()) {
                        try {
                            tuple = isReady.get();
                            if (tuple != null)
                                result.add(tuple);
                            it.remove();
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        } catch (ExecutionException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
            return result;
        } finally {
            es.shutdown();
        }
    }

    private Set<AnswerTuple> answerCQ_singleThread(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) {
        ExtendedConjunctiveQuery qext = ExtendedConjunctiveQuery.computeExtension(q);
        TupleIterator tupleIterator;
        try {
            tupleIterator = store.compileQuery(qext.toString(), prefixes, parameters);
        } catch (JRDFStoreException e) {
            e.printStackTrace();
            return null;
        }

        boolean useBushyValue = parameters.m_useBushy, allAnswersInRootValue = parameters.m_allAnswersInRoot;
        parameters.m_useBushy = false;
        parameters.m_allAnswersInRoot = false;
        try {
            Set<AnswerTuple> result = new HashSet<AnswerTuple>();
            AnswerTuple tuple;
            try {
                for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) {
                    Map<Term, GroundTerm> match = new HashMap<Term, GroundTerm>();
                    for (int i = 0; i < qext.getNumberOfAnswerTerms(); i++) {
                        match.put(qext.getAnswerTerm(i), tupleIterator.getGroundTerm(i));
                    }
                    if (((tuple = contains(qext, soundAnswerTuples, match)) != null) && (new Spurious(qext, match, tuple, isGround).call()) != null)
                        result.add(tuple);
                }
            } catch (JRDFStoreException e) {
                e.printStackTrace();
                return null;
            } finally {
                tupleIterator.dispose();
            }
            return result;
        } finally {
            parameters.m_useBushy = useBushyValue;
            parameters.m_allAnswersInRoot = allAnswersInRootValue;
        }
    }

    private AnswerTuple contains(ExtendedConjunctiveQuery qext, AnswerTuples answerTuples, Map<Term, GroundTerm> match) {
        GroundTerm[] terms = new GroundTerm[qext.getNumberOfRealAnswerTerms()];
        int index = 0;
        for (Term t : qext.getRealAnswerTerms())
            terms[index++] = match.get(t);
        AnswerTuple tuple = new AnswerTuple(terms);
        if (answerTuples != null && answerTuples.contains(tuple)) return null;
        return tuple;
    }

    // Callable that decides whether a candidate match is spurious; it returns the
    // answer tuple if the match is genuine and null otherwise.
    class Spurious implements Callable<AnswerTuple> {
        private ExtendedConjunctiveQuery query;
        private Map<Term, GroundTerm> match;
        private AnswerTuple tuple;
        private boolean isGround;

        public Spurious(ExtendedConjunctiveQuery query, Map<Term, GroundTerm> m, AnswerTuple t, boolean isGround) {
            this.query = query;
            this.match = m;
            this.tuple = t;
            this.isGround = isGround;
        }

        public AnswerTuple call() {
            if (!isMappingAnswerVariablesToAuxiliary(query, match)) {
                if (isGround) return tuple;

                EqualityConstraintRelation sim = new EqualityConstraintRelation(query, match);
                try {
                    sim.computeRelation();
                    if (areEqualityConstraintsSatisfiedByMatch(query, sim, match)
                            && !isCyclic(query, sim, match)) {
                        return tuple;
                    }
                } catch (ConstraintException e) {
                    Utility.logError(e.toString());
                    e.printStackTrace();
                    return null;
                }
            }
            return null;
        }

    }

    private boolean isMappingAnswerVariablesToAuxiliary(
            ExtendedConjunctiveQuery conjunctiveQuery,
            Map<Term, GroundTerm> match) {
        for (Term ansQueryTerm : conjunctiveQuery.getRealAnswerTerms()) {
            if (!(ansQueryTerm instanceof Individual)) {
                GroundTerm datalog_term = match.get(ansQueryTerm);
                if (isSyntacticAnonymous(datalog_term))
                    return true;
            }
        }
        return false;
    }

    private boolean isCyclic(ExtendedConjunctiveQuery q,
            EqualityConstraintRelation sim, Map<Term, GroundTerm> match) {
        DirectedGraph<Term, DefaultEdge> auxGraph = new DefaultDirectedGraph<Term, DefaultEdge>(
                DefaultEdge.class);
        for (Term queryTerm : q.getTerms()) {
            if (!(queryTerm instanceof Individual) && isRealAnonymous(match.get(queryTerm)))
                auxGraph.addVertex(sim.getRepresentative(queryTerm));
        }
        for (Atom a : q.getAtoms())
            if (a.getArity() == 2 && !(a.getArgument(0) instanceof Individual) && !(a.getArgument(1) instanceof Individual))
                if (isRealAnonymous(match.get(a.getArgument(0))) && isRealAnonymous(match.get(a.getArgument(1))))
                    // edge from the first argument's representative to the second argument's representative
                    auxGraph.addEdge(sim.getRepresentative(a.getArgument(0)), sim.getRepresentative(a.getArgument(1)));
        return (new CycleDetector<Term, DefaultEdge>(auxGraph)).detectCycles();
    }

    private boolean isRealAnonymous(GroundTerm datalog_t) {
        if (!(datalog_t instanceof uk.ac.ox.cs.JRDFox.model.Individual)) return false;
        uk.ac.ox.cs.JRDFox.model.Individual ind = (uk.ac.ox.cs.JRDFox.model.Individual) datalog_t;
        if (!ind.getIRI().startsWith(Namespace.KARMA_ANONY)) return false;

        return equalityGroups.find(ind.getIRI()).contains(Namespace.KARMA_ANONY);

//      String query = "select ?x where { ?x <http://www.w3.org/2002/07/owl#sameAs> <" + ind.getIRI() + ">. } ";
//      TupleIterator tupleIterator;
//      try {
//          tupleIterator = store.compileQuery(query, prefixes, parameters);
//      } catch (JRDFStoreException e) {
//          e.printStackTrace();
//          return false;
//      }
//
//      try {
//          GroundTerm t;
//          for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) {
//              t = tupleIterator.getGroundTerm(0);
//              if (t instanceof uk.ac.ox.cs.JRDFox.model.Individual && !((uk.ac.ox.cs.JRDFox.model.Individual) t).isAnony)
//                  return false;
//          }
//      } catch (JRDFStoreException e) {
//          e.printStackTrace();
//          return false;
//      } finally {
//          tupleIterator.dispose();
//      }
//      return true;
    }

    private boolean areEqualityConstraintsSatisfiedByMatch(
            ExtendedConjunctiveQuery q, EqualityConstraintRelation sim,
            Map<Term, GroundTerm> m) throws ConstraintException {
        for (Term s : q.getTerms())
            for (Term t : q.getTerms())
                if (sim.areConstraintToBeEqual(s, t)) {
                    if (!areMappedToEqualDatalogTerms(q, m, s, t))
                        return false;
                }
        return true;
    }

    private boolean areMappedToEqualDatalogTerms(
            ExtendedConjunctiveQuery q, Map<Term, GroundTerm> match,
            Term queryTerm1, Term queryTerm2) {
        GroundTerm datalogTerm1 = (queryTerm1 instanceof Individual) ? toRDFoxIndividual(queryTerm1) : match.get(queryTerm1);
        GroundTerm datalogTerm2 = (queryTerm2 instanceof Individual) ? toRDFoxIndividual(queryTerm2) : match.get(queryTerm2);
        if (datalogTerm1 != null && datalogTerm1.equals(datalogTerm2))
            return true;

        // compare the representatives of both terms in the equality union-find
        return equalityGroups.find(datalogTerm1.toString()).equals(equalityGroups.find(datalogTerm2.toString()));

//      String query = "prefix owl: <http://www.w3.org/2002/07/owl#> select where {"
//              + datalogTerm1
//              + " owl:sameAs "
//              + datalogTerm2
//              + ". } ";
//      TupleIterator tupleIterator;
//      try {
//          tupleIterator = store.compileQuery(query, prefixes, parameters);
//      } catch (JRDFStoreException e) {
//          e.printStackTrace();
//          return false;
//      }
//      boolean res = false;
//      try {
//          res = tupleIterator.open() != 0;
//      } catch (JRDFStoreException e) {
//          e.printStackTrace();
//          return false;
//      } finally {
//          tupleIterator.dispose();
//      }
//      return res;
    }

    private GroundTerm toRDFoxIndividual(Term t) {
        return uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) t).getIRI());
    }

    private boolean isSyntacticAnonymous(GroundTerm datalog_t) {
        if (datalog_t instanceof uk.ac.ox.cs.JRDFox.model.Individual && ((uk.ac.ox.cs.JRDFox.model.Individual) datalog_t).getIRI().startsWith(Namespace.KARMA_ANONY))
            return true;
        return false;
    }

    class EqualityConstraintRelation {

        private ExtendedConjunctiveQuery cq;
        private Map<Term, GroundTerm> match;
        private Map<Term, Set<Term>> sim;

        public EqualityConstraintRelation(ExtendedConjunctiveQuery q,
                Map<Term, GroundTerm> m) {
            cq = q;
            match = m;
            sim = new HashMap<Term, Set<Term>>();
        }

        public void addSingletonClass(Term t) {
            Set<Term> eqclass = new HashSet<Term>();
            eqclass.add(t);
            sim.put(t, eqclass);
        }

        public boolean areConstraintToBeEqual(Term s, Term t)
                throws ConstraintException {
            Term sRepresentative = getRepresentative(s);
            Term tRepresentative = getRepresentative(t);
            if (sRepresentative == null || tRepresentative == null) {
                throw new ConstraintException("Cannot identify terms " + s
                        + " and " + t);
            }
            return sRepresentative.equals(tRepresentative);
        }

        public void constrainToBeEqual(Term s, Term t)
                throws ConstraintException {
            Term sRepresentative = getRepresentative(s);
            Term tRepresentative = getRepresentative(t);
            if (sRepresentative == null || tRepresentative == null) {
                throw new ConstraintException("Cannot identify terms " + s
                        + " and " + t);
            }
            if (!sRepresentative.equals(tRepresentative)) {
                sim.get(sRepresentative).addAll(sim.get(tRepresentative));
                sim.remove(tRepresentative);
            }
        }

        public Term getRepresentative(Term s) {
            if (sim.containsKey(s))
                return s;
            for (Term key : sim.keySet()) {
                if (sim.get(key).contains(s))
                    return key;
            }
            return null;
        }

        public Set<Term> getEquivalenceClass(Term s) {
            if (sim.containsKey(s))
                return sim.get(s);
            for (Set<Term> eqClass : sim.values()) {
                if (eqClass.contains(s))
                    return eqClass;
            }
            return null;
        }

        // Fork rule: if two binary atoms have their second arguments constrained to be
        // equal and mapped to a truly anonymous individual, constrain their first
        // arguments to be equal as well; repeat until a fixpoint is reached.
        public void deriveForkConstraints() throws ConstraintException {
            boolean newDerivedConstraints = true;
            while (newDerivedConstraints) {
                newDerivedConstraints = false;
                for (Atom a1 : cq.getAtoms())
                    for (Atom a2 : cq.getAtoms()) {
                        if (a1.getArity() == 2 && a2.getArity() == 2) {
                            GroundTerm term = a1.getArgument(1) instanceof Individual ? toRDFoxIndividual(a1.getArgument(1)) : match.get(a1.getArgument(1));
                            if (areConstraintToBeEqual(a1.getArgument(1), a2.getArgument(1)) && !areConstraintToBeEqual(a1.getArgument(0), a2.getArgument(0))) {
                                if (isRealAnonymous(term)) {
                                    constrainToBeEqual(a1.getArgument(0), a2.getArgument(0));
                                    newDerivedConstraints = true;
                                }
                            }
                        }
                    }
            }
        }

        public void computeRelation() throws ConstraintException {
            for (Term t : cq.getTerms()) {
                addSingletonClass(t);
            }
            deriveForkConstraints();
        }

        public String toString() {
            String res = "";
            for (Set<Term> terms : this.sim.values()) {
                res += "[ ";
                for (Term t : terms)
                    res += t + " ";
                res += "]\n";
            }
            return res;
        }

    }

    public void dispose() {
        store.dispose();
    }

}
\ No newline at end of file
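
For context, a minimal sketch of how the class added by this commit might be driven, using only the public methods visible in the diff above. The wrapper class name, the file arguments and the way the ConjunctiveQuery is obtained are placeholders and not part of this commit.

import java.io.File;
import java.util.Collection;

import org.semanticweb.karma2.MyKarma;
import org.semanticweb.karma2.model.ConjunctiveQuery;

import uk.ac.ox.cs.pagoda.query.AnswerTuple;

// Hypothetical driver, not part of this commit.
public class MyKarmaUsageSketch {

    // 'dataFile' is a Turtle data file, 'ruleFile' a datalog program, and 'cq' a parsed
    // conjunctive query; how these are produced is outside the scope of MyKarma.java.
    public static Collection<AnswerTuple> answer(File dataFile, File ruleFile,
            ConjunctiveQuery cq, boolean isGround) throws Exception {
        MyKarma karma = new MyKarma();
        try {
            karma.initializeData(dataFile);   // import the Turtle data into the RDFox store
            karma.materialise(ruleFile);      // load the rules and apply reasoning
            karma.setConcurrence(true);       // run the per-match spuriousness checks on a thread pool
            // When isGround is true, only the auxiliary-individual check in Spurious.call is applied.
            return karma.answerCQ(cq, isGround);
        } finally {
            karma.dispose();
        }
    }
}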