author    yzhou <yujiao.zhou@gmail.com>  2015-04-21 10:34:27 +0100
committer yzhou <yujiao.zhou@gmail.com>  2015-04-21 10:34:27 +0100
commit    9ce65c5a963b03ee97fe9cb6c5aa65a3c04a80a8 (patch)
tree      47511c0fb89dccff0db4b5990522e04f294d795b /src/org/semanticweb
parent    b1ac207612ee8b045244253fb94b866104bc34f2 (diff)
download  ACQuA-9ce65c5a963b03ee97fe9cb6c5aa65a3c04a80a8.tar.gz
          ACQuA-9ce65c5a963b03ee97fe9cb6c5aa65a3c04a80a8.zip
initial version
Diffstat (limited to 'src/org/semanticweb')
-rw-r--r--  src/org/semanticweb/karma2/MyKarma.java                                    483
-rw-r--r--  src/org/semanticweb/karma2/clausifier/OntologyProcesser.java               572
-rw-r--r--  src/org/semanticweb/karma2/exception/ConstraintException.java               17
-rw-r--r--  src/org/semanticweb/karma2/exception/IllegalInputOntologyException.java      9
-rw-r--r--  src/org/semanticweb/karma2/exception/IllegalInputQueryException.java        14
-rw-r--r--  src/org/semanticweb/karma2/exception/QueryExecutionException.java           13
-rw-r--r--  src/org/semanticweb/karma2/model/ConjunctiveQuery.java                      80
-rw-r--r--  src/org/semanticweb/karma2/model/Equality.java                               36
-rw-r--r--  src/org/semanticweb/karma2/model/ExtendedConjunctiveQuery.java              94
-rw-r--r--  src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g               140
-rw-r--r--  src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.tokens           36
-rw-r--r--  src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryLexer.java       814
-rw-r--r--  src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryParser.java     1611
-rw-r--r--  src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryWalker.java      179
-rw-r--r--  src/org/semanticweb/karma2/profile/ELHOProfile.java                        281
-rw-r--r--  src/org/semanticweb/karma2/profile/MyOWLOntologyWalker.java                913
-rw-r--r--  src/org/semanticweb/simpleETL/RDFHandlerWriter.java                         45
-rw-r--r--  src/org/semanticweb/simpleETL/SimpleETL.java                                 71
18 files changed, 5408 insertions, 0 deletions
diff --git a/src/org/semanticweb/karma2/MyKarma.java b/src/org/semanticweb/karma2/MyKarma.java
new file mode 100644
index 0000000..60938df
--- /dev/null
+++ b/src/org/semanticweb/karma2/MyKarma.java
@@ -0,0 +1,483 @@
1package org.semanticweb.karma2;
2
3import java.io.File;
4import java.io.FileNotFoundException;
5import java.util.Collection;
6import java.util.HashMap;
7import java.util.HashSet;
8import java.util.Iterator;
9import java.util.Map;
10import java.util.Scanner;
11import java.util.Set;
12import java.util.concurrent.Callable;
13import java.util.concurrent.ExecutionException;
14import java.util.concurrent.ExecutorService;
15import java.util.concurrent.Executors;
16import java.util.concurrent.Future;
17
18import org.jgrapht.DirectedGraph;
19import org.jgrapht.alg.CycleDetector;
20import org.jgrapht.graph.DefaultDirectedGraph;
21import org.jgrapht.graph.DefaultEdge;
22import org.semanticweb.HermiT.model.Atom;
23import org.semanticweb.HermiT.model.Individual;
24import org.semanticweb.HermiT.model.Term;
25import org.semanticweb.karma2.exception.ConstraintException;
26import org.semanticweb.karma2.model.ConjunctiveQuery;
27import org.semanticweb.karma2.model.ExtendedConjunctiveQuery;
28
29import uk.ac.ox.cs.JRDFox.model.GroundTerm;
30import uk.ac.ox.cs.JRDFox.store.DataStore;
31import uk.ac.ox.cs.JRDFox.store.Parameters;
32import uk.ac.ox.cs.JRDFox.Prefixes;
33import uk.ac.ox.cs.JRDFox.JRDFStoreException;
34import uk.ac.ox.cs.JRDFox.store.TupleIterator;
35import uk.ac.ox.cs.pagoda.MyPrefixes;
36import uk.ac.ox.cs.pagoda.query.AnswerTuple;
37import uk.ac.ox.cs.pagoda.query.AnswerTuples;
38import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine;
39import uk.ac.ox.cs.pagoda.util.Namespace;
40import uk.ac.ox.cs.pagoda.util.Timer;
41import uk.ac.ox.cs.pagoda.util.UFS;
42import uk.ac.ox.cs.pagoda.util.Utility;
43
44public class MyKarma {
45
46 private DataStore store;
47
48 private Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes();
49 private Parameters parameters = new Parameters();
50
51 public MyKarma() {
52 store = RDFoxQueryEngine.createDataStore();
53 parameters.m_allAnswersInRoot = true;
54 parameters.m_useBushy = true;
55 }
56
57 private UFS<String> equalityGroups = null;
58
59 public void computeEqualityGroups() {
60 if (equalityGroups != null) return ;
61 equalityGroups = new UFS<String>();
62 TupleIterator answers = null;
63 try {
64 Timer t = new Timer();
65 answers = store.compileQuery("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + "?z . }", prefixes, parameters);
66 for (long multi = answers.open(); multi != 0; multi = answers.getNext()) {
67 if (answers.getResourceID(0) != answers.getResourceID(1))
68 equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm);
69 }
70 Utility.logInfo("@Time to group individuals by equality: " + t.duration());
71 } catch (JRDFStoreException e) {
72 e.printStackTrace();
73 } finally {
74 if (answers != null) answers.dispose();
75 }
76 }
77
78 public DataStore getStore() {
79 return store;
80 }
81
82 public long getNumberOfFacts() throws JRDFStoreException {
83 return store.getTriplesCount();
84 }
85
86 public void initializeData(File dataFile) throws JRDFStoreException,
87 FileNotFoundException {
88 store.importTurtleFile(dataFile, prefixes);
89 }
90
91 public void materialise(File ruleFile) throws JRDFStoreException, FileNotFoundException {
92 Timer t = new Timer();
93 Scanner scanner = new Scanner(ruleFile);
94 String datalogProgram = scanner.useDelimiter("\\Z").next();
95 scanner.close();
96 store.clearRulesAndMakeFactsExplicit();
97// store.addRules(new String[] {datalogProgram});
98 store.importRules(datalogProgram);
99 store.applyReasoning();
100 Utility.logDebug("elho-lower-store finished its own materialisation in " + t.duration() + " seconds.");
101 }
102
103 public Collection<AnswerTuple> answerCQ(ConjunctiveQuery q, boolean isGround) {
104 return answerCQ(q, null, isGround);
105 }
106
107 boolean m_multiThread = false;
108
109 public void setConcurrence(boolean multiThread) {
110 this.m_multiThread = multiThread;
111 }
112
113 public Set<AnswerTuple> answerCQ(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) {
114 computeEqualityGroups();
115 if (m_multiThread)
116 return answerCQ_multiThread(q, soundAnswerTuples, isGround);
117 else
118 return answerCQ_singleThread(q, soundAnswerTuples, isGround);
119 }
120
121 private Set<AnswerTuple> answerCQ_multiThread(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) {
122 Set<Future<AnswerTuple>> set = new HashSet<Future<AnswerTuple>>();
123 ExtendedConjunctiveQuery qext = ExtendedConjunctiveQuery.computeExtension(q);
124 TupleIterator tupleIterator;
125 try {
126 tupleIterator = store.compileQuery(qext.toString(), prefixes, parameters);
127 } catch (JRDFStoreException e) {
128 e.printStackTrace();
129 return null;
130 }
131 ExecutorService es = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
132 AnswerTuple tuple;
133 try {
134 try {
135 for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) {
136 Map<Term, GroundTerm> match = new HashMap<Term, GroundTerm>();
137 for (int i = 0; i < qext.getNumberOfAnswerTerms(); i++) {
138 match.put(qext.getAnswerTerm(i), tupleIterator.getGroundTerm(i));
139 }
140 if ((tuple = contains(qext, soundAnswerTuples, match)) != null)
141 set.add(es.submit(new Spurious(qext, match, tuple, isGround)));
142 }
143 } catch (JRDFStoreException e) {
144 e.printStackTrace();
145 return null;
146 } finally {
147 tupleIterator.dispose();
148 }
149 Set<AnswerTuple> result = new HashSet<AnswerTuple>(set.size());
150 while(!set.isEmpty()) {
151 Iterator<Future<AnswerTuple>> it = set.iterator();
152 while(it.hasNext()) {
153 Future<AnswerTuple> isReady = it.next();
154 if (isReady.isDone()) {
155 try {
156 tuple = isReady.get();
157 if (tuple != null)
158 result.add(tuple);
159 it.remove();
160 } catch (InterruptedException e) {
161 e.printStackTrace();
162 } catch (ExecutionException e) {
163 e.printStackTrace();
164 }
165 }
166 }
167 }
168 return result;
169 } finally {
170 es.shutdown();
171 }
172 }
173
174 private Set<AnswerTuple> answerCQ_singleThread(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) {
175 ExtendedConjunctiveQuery qext = ExtendedConjunctiveQuery.computeExtension(q);
176 TupleIterator tupleIterator;
177 try {
178 tupleIterator = store.compileQuery(qext.toString(), prefixes, parameters);
179 } catch (JRDFStoreException e) {
180 e.printStackTrace();
181 return null;
182 }
183
184 boolean useBushyValue = parameters.m_useBushy, allAnswersInRootValue = parameters.m_allAnswersInRoot;
185 parameters.m_useBushy = false;
186 parameters.m_allAnswersInRoot = false;
187 try {
188 Set<AnswerTuple> result = new HashSet<AnswerTuple>();
189 AnswerTuple tuple;
190 try {
191 for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) {
192 Map<Term, GroundTerm> match = new HashMap<Term, GroundTerm>();
193 for (int i = 0; i < qext.getNumberOfAnswerTerms(); i++) {
194 match.put(qext.getAnswerTerm(i), tupleIterator.getGroundTerm(i));
195 }
196 if (((tuple = contains(qext, soundAnswerTuples, match)) != null) && (new Spurious(qext, match, tuple, isGround).call()) != null)
197 result.add(tuple);
198 }
199 } catch (JRDFStoreException e) {
200 e.printStackTrace();
201 return null;
202 } finally {
203 tupleIterator.dispose();
204 }
205 return result;
206 } finally {
207 parameters.m_useBushy = useBushyValue;
208 parameters.m_allAnswersInRoot = allAnswersInRootValue;
209 }
210
211 }
212
213 private AnswerTuple contains(ExtendedConjunctiveQuery qext, AnswerTuples answerTuples, Map<Term, GroundTerm> match) {
214 GroundTerm[] terms = new GroundTerm[qext.getNumberOfRealAnswerTerms()];
215 int index = 0;
216 for (Term t : qext.getRealAnswerTerms())
217 terms[index++] = match.get(t);
218 AnswerTuple tuple = new AnswerTuple(terms);
219 if (answerTuples != null && answerTuples.contains(tuple)) return null;
220 return tuple;
221 }
222
223
224 class Spurious implements Callable<AnswerTuple> {
225 private ExtendedConjunctiveQuery query;
226 private Map<Term, GroundTerm> match;
227 private AnswerTuple tuple;
228 private boolean isGround;
229
230 public Spurious(ExtendedConjunctiveQuery query, Map<Term, GroundTerm> m, AnswerTuple t, boolean isGround) {
231 this.query = query;
232 this.match = m;
233 this.tuple = t;
234 this.isGround = isGround;
235 }
236
237 public AnswerTuple call() {
238 if (isMappingAnswerVariablesToAuxiliary(query, match));
239 else {
240 if (isGround) return tuple;
241
242 EqualityConstraintRelation sim = new EqualityConstraintRelation(query, match);
243 try {
244 sim.computeRelation();
245 if (areEqualityConstraintsSatisfiedByMatch(query, sim, match)
246 && !isCyclic(query, sim, match)) {
247 return tuple;
248 }
249 } catch (ConstraintException e) {
250 Utility.logError(e.toString());
251 e.printStackTrace();
252 return null;
253 }
254 }
255 return null;
256 }
257
258 }
259
260 private boolean isMappingAnswerVariablesToAuxiliary(
261 ExtendedConjunctiveQuery conjunctiveQuery,
262 Map<Term, GroundTerm> match) {
263 for (Term ansQueryTerm : conjunctiveQuery.getRealAnswerTerms()) {
264 if (! (ansQueryTerm instanceof Individual)) {
265 GroundTerm datalog_term = match.get(ansQueryTerm);
266 if (isSyntacticAnonymous(datalog_term))
267 return true;
268 }
269 }
270 return false;
271 }
272
273 private boolean isCyclic(ExtendedConjunctiveQuery q,
274 EqualityConstraintRelation sim, Map<Term, GroundTerm> match) {
275 DirectedGraph<Term, DefaultEdge> auxGraph = new DefaultDirectedGraph<Term, DefaultEdge>(
276 DefaultEdge.class);
277 for (Term queryTerm : q.getTerms()) {
278 if (!(queryTerm instanceof Individual) && isRealAnonymous(match.get(queryTerm)))
279 auxGraph.addVertex(sim.getRepresentative(queryTerm));
280 }
281 for (Atom a : q.getAtoms())
282 if (a.getArity() == 2 && !(a.getArgument(0) instanceof Individual) && !(a.getArgument(1) instanceof Individual))
283 if (isRealAnonymous(match.get(a.getArgument(0))) && isRealAnonymous(match.get(a.getArgument(1))))
284					auxGraph.addEdge(sim.getRepresentative(a.getArgument(0)), sim.getRepresentative(a.getArgument(1)));
285 return (new CycleDetector<Term, DefaultEdge>(auxGraph)).detectCycles();
286
287 }
288
289 private boolean isRealAnonymous(GroundTerm datalog_t) {
290 if (!(datalog_t instanceof uk.ac.ox.cs.JRDFox.model.Individual)) return false;
291 uk.ac.ox.cs.JRDFox.model.Individual ind = (uk.ac.ox.cs.JRDFox.model.Individual) datalog_t;
292 if (!ind.getIRI().startsWith(Namespace.KARMA_ANONY)) return false;
293
294 return equalityGroups.find(ind.getIRI()).contains(Namespace.KARMA_ANONY);
295
296// String query = "select ?x where { ?x <http://www.w3.org/2002/07/owl#sameAs> <" + ind.getIRI() + ">. } ";
297// TupleIterator tupleIterator;
298// try {
299// tupleIterator = store.compileQuery(query, prefixes, parameters);
300// } catch (JRDFStoreException e) {
301// e.printStackTrace();
302// return false;
303// }
304//
305// try {
306// GroundTerm t;
307// for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) {
308// t = tupleIterator.getGroundTerm(0);
309// if (t instanceof uk.ac.ox.cs.JRDFox.model.Individual && !((uk.ac.ox.cs.JRDFox.model.Individual) t).isAnony)
310// return false;
311// }
312// } catch (JRDFStoreException e) {
313// e.printStackTrace();
314// return false;
315// } finally {
316// tupleIterator.dispose();
317// }
318// return true;
319 }
320
321 private boolean areEqualityConstraintsSatisfiedByMatch(
322 ExtendedConjunctiveQuery q, EqualityConstraintRelation sim,
323 Map<Term, GroundTerm> m) throws ConstraintException {
324 for (Term s : q.getTerms())
325 for (Term t : q.getTerms())
326 if (sim.areConstraintToBeEqual(s, t)) {
327 if (!areMappedToEqualDatalogTerms(q, m, s, t))
328 return false;
329 }
330 return true;
331 }
332
333 private boolean areMappedToEqualDatalogTerms(
334 ExtendedConjunctiveQuery q, Map<Term, GroundTerm> match,
335 Term queryTerm1, Term queryTerm2) {
336 GroundTerm datalogTerm1 = (queryTerm1 instanceof Individual) ? toRDFoxIndividual(queryTerm1) : match.get(queryTerm1);
337 GroundTerm datalogTerm2 = (queryTerm2 instanceof Individual) ? toRDFoxIndividual(queryTerm2) : match.get(queryTerm2);
338 if (datalogTerm1 != null && datalogTerm1.equals(datalogTerm2))
339 return true;
340
341 return equalityGroups.find(datalogTerm1.toString()).equals(datalogTerm2.toString());
342// String query = "prefix owl: <http://www.w3.org/2002/07/owl#> select where {"
343// + datalogTerm1
344// + " owl:sameAs "
345// + datalogTerm2
346// + ". } ";
347// TupleIterator tupleIterator;
348// try {
349// tupleIterator = store.compileQuery(query, prefixes, parameters);
350// } catch (JRDFStoreException e) {
351// e.printStackTrace();
352// return false;
353// }
354// boolean res = false;
355// try {
356// res = tupleIterator.open() != 0;
357// } catch (JRDFStoreException e) {
358// e.printStackTrace();
359// return false;
360// } finally {
361// tupleIterator.dispose();
362// }
363// return res;
364 }
365
366 private GroundTerm toRDFoxIndividual(Term t) {
367 return uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) t).getIRI());
368 }
369
370 private boolean isSyntacticAnonymous(GroundTerm datalog_t) {
371 if (datalog_t instanceof uk.ac.ox.cs.JRDFox.model.Individual && ((uk.ac.ox.cs.JRDFox.model.Individual) datalog_t).getIRI().startsWith(Namespace.KARMA_ANONY))
372 return true;
373 return false;
374 }
375
376 class EqualityConstraintRelation {
377
378 private ExtendedConjunctiveQuery cq;
379 private Map<Term, GroundTerm> match;
380 private Map<Term, Set<Term>> sim;
381
382 public EqualityConstraintRelation(ExtendedConjunctiveQuery q,
383 Map<Term, GroundTerm> m) {
384 cq = q;
385 match = m;
386 sim = new HashMap<Term, Set<Term>>();
387 }
388
389 public void addSingletonClass(Term t) {
390 Set<Term> eqclass = new HashSet<Term>();
391 eqclass.add(t);
392 sim.put(t, eqclass);
393 }
394
395 public boolean areConstraintToBeEqual(Term s, Term t)
396 throws ConstraintException {
397 Term sRepresentative = getRepresentative(s);
398 Term tRepresentative = getRepresentative(t);
399 if (sRepresentative == null || tRepresentative == null) {
400 throw new ConstraintException("Cannot identify terms " + s
401 + " and " + t);
402 }
403 return sRepresentative.equals(tRepresentative);
404 }
405
406 public void constrainToBeEqual(Term s, Term t)
407 throws ConstraintException {
408 Term sRepresentative = getRepresentative(s);
409 Term tRepresentative = getRepresentative(t);
410 if (sRepresentative == null || tRepresentative == null) {
411 throw new ConstraintException("Cannot identify terms " + s
412 + " and " + t);
413 }
414 if (!sRepresentative.equals(tRepresentative)) {
415 sim.get(sRepresentative).addAll(sim.get(tRepresentative));
416 sim.remove(tRepresentative);
417 }
418 }
419
420 public Term getRepresentative(Term s) {
421 if (sim.containsKey(s))
422 return s;
423 for (Term key : sim.keySet()) {
424 if (sim.get(key).contains(s))
425 return key;
426 }
427 return null;
428 }
429
430 public Set<Term> getEquivalenceClass(Term s) {
431 if (sim.containsKey(s))
432 return sim.get(s);
433 for (Set<Term> eqClass : sim.values()) {
434 if (eqClass.contains(s))
435 return eqClass;
436 }
437 return null;
438 }
439
440 public void deriveForkConstraints() throws ConstraintException {
441 boolean newDerivedConstraints = true;
442 while (newDerivedConstraints) {
443 newDerivedConstraints = false;
444 for (Atom a1 : cq.getAtoms())
445 for (Atom a2 : cq.getAtoms()) {
446 if (a1.getArity() == 2 && a2.getArity() == 2) {
447 GroundTerm term = a1.getArgument(1) instanceof Individual ? toRDFoxIndividual(a1.getArgument(1)) : match.get(a1.getArgument(1));
448 if (areConstraintToBeEqual(a1.getArgument(1), a2.getArgument(1)) && !areConstraintToBeEqual(a1.getArgument(0),a2.getArgument(0))) {
449 if (isRealAnonymous(term)) {
450 constrainToBeEqual(a1.getArgument(0), a2.getArgument(0));
451 newDerivedConstraints = true;
452 }
453 }
454 }
455 }
456 }
457 }
458
459 public void computeRelation() throws ConstraintException {
460 for (Term t : cq.getTerms()) {
461 addSingletonClass(t);
462 }
463 deriveForkConstraints();
464 }
465
466 public String toString() {
467 String res = "";
468 for (Set<Term> terms : this.sim.values()) {
469 res += "[ ";
470 for (Term t : terms)
471 res += t + " ";
472 res += "]\n";
473 }
474 return res;
475 }
476
477 }
478
479 public void dispose() {
480 store.dispose();
481 }
482
483}
\ No newline at end of file
diff --git a/src/org/semanticweb/karma2/clausifier/OntologyProcesser.java b/src/org/semanticweb/karma2/clausifier/OntologyProcesser.java
new file mode 100644
index 0000000..fee8dff
--- /dev/null
+++ b/src/org/semanticweb/karma2/clausifier/OntologyProcesser.java
@@ -0,0 +1,572 @@
1package org.semanticweb.karma2.clausifier;
2
3import java.io.BufferedWriter;
4import java.io.File;
5import java.io.FileNotFoundException;
6import java.io.FileWriter;
7import java.io.IOException;
8import java.io.PrintWriter;
9import java.util.ArrayList;
10import java.util.Collection;
11import java.util.HashSet;
12import java.util.LinkedHashSet;
13import java.util.List;
14import java.util.Set;
15
16import org.semanticweb.HermiT.model.Atom;
17import org.semanticweb.HermiT.model.AtomicConcept;
18import org.semanticweb.HermiT.model.AtomicRole;
19import org.semanticweb.HermiT.model.DLClause;
20import org.semanticweb.HermiT.model.Individual;
21import org.semanticweb.HermiT.model.Role;
22import org.semanticweb.HermiT.model.Term;
23import org.semanticweb.HermiT.model.Variable;
24import org.semanticweb.HermiT.structural.BuiltInPropertyManager;
25import org.semanticweb.HermiT.structural.OWLAxioms;
26import org.semanticweb.HermiT.structural.OWLAxiomsExpressivity;
27import org.semanticweb.HermiT.structural.OWLNormalization;
28import org.semanticweb.HermiT.structural.ObjectPropertyInclusionManager;
29import org.semanticweb.karma2.exception.IllegalInputOntologyException;
30import org.semanticweb.karma2.model.Equality;
31import org.semanticweb.karma2.profile.ELHOProfile;
32import org.semanticweb.owlapi.model.OWLClass;
33import org.semanticweb.owlapi.model.OWLClassAssertionAxiom;
34import org.semanticweb.owlapi.model.OWLClassExpression;
35import org.semanticweb.owlapi.model.OWLClassExpressionVisitor;
36import org.semanticweb.owlapi.model.OWLDataAllValuesFrom;
37import org.semanticweb.owlapi.model.OWLDataExactCardinality;
38import org.semanticweb.owlapi.model.OWLDataFactory;
39import org.semanticweb.owlapi.model.OWLDataHasValue;
40import org.semanticweb.owlapi.model.OWLDataMaxCardinality;
41import org.semanticweb.owlapi.model.OWLDataMinCardinality;
42import org.semanticweb.owlapi.model.OWLDataProperty;
43import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom;
44import org.semanticweb.owlapi.model.OWLDataPropertyExpression;
45import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom;
46import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom;
47import org.semanticweb.owlapi.model.OWLIndividual;
48import org.semanticweb.owlapi.model.OWLIndividualAxiom;
49import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom;
50import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom;
51import org.semanticweb.owlapi.model.OWLObjectAllValuesFrom;
52import org.semanticweb.owlapi.model.OWLObjectComplementOf;
53import org.semanticweb.owlapi.model.OWLObjectExactCardinality;
54import org.semanticweb.owlapi.model.OWLObjectHasSelf;
55import org.semanticweb.owlapi.model.OWLObjectHasValue;
56import org.semanticweb.owlapi.model.OWLObjectIntersectionOf;
57import org.semanticweb.owlapi.model.OWLObjectInverseOf;
58import org.semanticweb.owlapi.model.OWLObjectMaxCardinality;
59import org.semanticweb.owlapi.model.OWLObjectMinCardinality;
60import org.semanticweb.owlapi.model.OWLObjectOneOf;
61import org.semanticweb.owlapi.model.OWLObjectProperty;
62import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom;
63import org.semanticweb.owlapi.model.OWLObjectPropertyExpression;
64import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom;
65import org.semanticweb.owlapi.model.OWLObjectUnionOf;
66import org.semanticweb.owlapi.model.OWLOntology;
67import org.semanticweb.owlapi.model.OWLSameIndividualAxiom;
68import org.semanticweb.owlapi.profiles.OWLProfileReport;
69import org.semanticweb.owlapi.util.OWLAxiomVisitorAdapter;
70
71import uk.ac.ox.cs.pagoda.util.Utility;
72
73public class OntologyProcesser {
74
75
76 protected static final Variable X=Variable.create("?X");
77 protected static final Variable Y=Variable.create("?Y");
78 protected static final Variable Z=Variable.create("?Z");
79
80
81 public static void transformOntology(OWLOntology root, File dataFile, File ruleFile) throws IllegalInputOntologyException {
82 ELHOProfile profile = new ELHOProfile();
83 OWLProfileReport report = profile.checkOntology(root);
84 if (!report.isInProfile()) {
85 Utility.logError(report.toString());
86 throw new IllegalInputOntologyException("the ontology is not ELHO");
87 }
88 OntologyProcesser processer = new OntologyProcesser();
89 processer.preprocessAndClausify(root, dataFile, ruleFile);
90 }
91
92
93 private void preprocessAndClausify(OWLOntology rootOntology, File dataFile, File ruleFile) {
94 OWLDataFactory factory=rootOntology.getOWLOntologyManager().getOWLDataFactory();
95 String ontologyIRI=rootOntology.getOntologyID().getDefaultDocumentIRI()==null ? "urn:hermit:kb" : rootOntology.getOntologyID().getDefaultDocumentIRI().toString();
96 Collection<OWLOntology> importClosure=rootOntology.getImportsClosure();
97 OWLAxioms axioms=new OWLAxioms();
98 OWLNormalization normalization=new OWLNormalization(factory,axioms,0);
99 for (OWLOntology ontology : importClosure) {
100 normalization.processOntology(ontology);
101 }
102 BuiltInPropertyManager builtInPropertyManager=new BuiltInPropertyManager(factory);
103 builtInPropertyManager.axiomatizeBuiltInPropertiesAsNeeded(axioms);
104 ObjectPropertyInclusionManager objectPropertyInclusionManager=new ObjectPropertyInclusionManager(axioms);
105 objectPropertyInclusionManager.rewriteAxioms(factory,axioms,0);
106 OWLAxiomsExpressivity axiomsExpressivity=new OWLAxiomsExpressivity(axioms);
107 clausify(factory,ontologyIRI,axioms,axiomsExpressivity, dataFile,ruleFile);
108 writeTopRules(rootOntology.getClassesInSignature(), rootOntology.getObjectPropertiesInSignature(), ruleFile);
109
110}
111
112
113 private void writeTopRules(Set<OWLClass> classes, Set<OWLObjectProperty> properties, File ruleFile) {
114 PrintWriter writer = null;
115 try {
116 writer = new PrintWriter(new BufferedWriter(new FileWriter(ruleFile, true)));
117 for (OWLClass cls : classes) {
118 writer.println("<http://www.w3.org/2002/07/owl#Thing>(?X) :- <" + cls.toStringID() + ">(?X).");
119 }
120
121 for (OWLObjectProperty prop : properties) {
122 writer.println("<http://www.w3.org/2002/07/owl#Thing>(?X) :- <" + prop.toStringID() + ">(?X,?Y).");
123 writer.println("<http://www.w3.org/2002/07/owl#Thing>(?Y) :- <" + prop.toStringID() + ">(?X,?Y).");
124 }
125
126 }catch (FileNotFoundException e) {
127 e.printStackTrace();
128 } catch (IOException e) {
129 e.printStackTrace();
130 } finally{
131			if (writer != null) writer.close();
132 classes.clear();
133 properties.clear();
134 }
135
136 }
137
138 private void writeDataFile(Set<Atom> positiveFacts, File dataFile) {
139 PrintWriter writer = null;
140 try {
141 writer = new PrintWriter(dataFile);
142 for (Atom a: positiveFacts) {
143 if (a.getArity() == 1) {
144 writer.println(a.getArgument(0)+ " <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> " + a.getDLPredicate() + " . ");
145 }
146
147 if (a.getArity() == 2) {
148 writer.println(a.getArgument(0)+ " " + a.getDLPredicate() + " "+ a.getArgument(1) + " . ");
149 }
150 }
151 }catch (FileNotFoundException e) {
152 e.printStackTrace();
153 } finally{
154			if (writer != null) writer.close();
155 positiveFacts.clear();
156 positiveFacts = null;
157 }
158
159 }
160
161
162 private void writeRules(Set<DLClause> clauses, File ruleFile) {
163 PrintWriter writer = null;
164 boolean first;
165 Atom emptyHeadAtom = Atom.create(AtomicConcept.NOTHING, X);
166 try {
167 writer = new PrintWriter(ruleFile);
168 for (DLClause clause : clauses) {
169 Atom headAtom = clause.getHeadLength() > 0 ? clause.getHeadAtom(0) : emptyHeadAtom;
170 writer.print(headAtom + " :- ");
171 first = true;
172 for (Atom bodyAtom : clause.getBodyAtoms())
173 if (first) {
174 writer.print( bodyAtom);
175 first = false;
176 }
177 else
178 writer.print( ", " + bodyAtom);
179
180 writer.println(" .");
181 }
182 writer.println("<http://www.w3.org/2002/07/owl#sameas>(?X,?Z) :- <http://www.w3.org/2002/07/owl#sameas>(?X,?Y), <http://www.w3.org/2002/07/owl#sameas>(?Y,?Z) .");
183 writer.println("<http://www.w3.org/2002/07/owl#sameas>(?Y,?X) :- <http://www.w3.org/2002/07/owl#sameas>(?X,?Y) .");
184
185 }catch (FileNotFoundException e) {
186 e.printStackTrace();
187 } finally{
188			if (writer != null) writer.close();
189 clauses.clear();
190 clauses = null;
191 }
192
193 }
194
195
196 public void clausify(OWLDataFactory factory,String ontologyIRI,OWLAxioms axioms,OWLAxiomsExpressivity axiomsExpressivity, File dataFile, File ruleFile) {
197 Set<DLClause> dlClauses=new LinkedHashSet<DLClause>();
198 Set<Atom> positiveFacts=new HashSet<Atom>();
199 for (OWLObjectPropertyExpression[] inclusion : axioms.m_simpleObjectPropertyInclusions) {
200 Atom subRoleAtom=getRoleAtom(inclusion[0],X,Y);
201 Atom superRoleAtom=getRoleAtom(inclusion[1],X,Y);
202 DLClause dlClause=DLClause.create(new Atom[] { superRoleAtom },new Atom[] { subRoleAtom });
203 dlClauses.add(dlClause);
204 }
205 NormalizedDatalogAxiomClausifier clausifier=new NormalizedDatalogAxiomClausifier(positiveFacts,factory);
206 for (OWLClassExpression[] inclusion : axioms.m_conceptInclusions) {
207 for (OWLClassExpression description : inclusion)
208 description.accept(clausifier);
209 for(DLClause dlClause :clausifier.getDLClause())
210 dlClauses.add(dlClause.getSafeVersion(AtomicConcept.THING));
211 }
212 DatalogFactClausifier factClausifier=new DatalogFactClausifier(positiveFacts);
213 for (OWLIndividualAxiom fact : axioms.m_facts)
214 fact.accept(factClausifier);
215 writeDataFile(positiveFacts, dataFile);
216 writeRules(dlClauses, ruleFile);
217 }
218
219 protected static AtomicRole getAtomicRole(OWLDataPropertyExpression dataPropertyExpression) {
220 return AtomicRole.create(((OWLDataProperty)dataPropertyExpression).getIRI().toString());
221 }
222 protected static Atom getRoleAtom(OWLObjectPropertyExpression objectProperty,Term first,Term second) {
223 objectProperty=objectProperty.getSimplified();
224 if (!objectProperty.isAnonymous()) {
225 AtomicRole role=AtomicRole.create(objectProperty.asOWLObjectProperty().getIRI().toString());
226 return Atom.create(role,first,second);
227 }
228 else if (objectProperty.isAnonymous()) {
229 OWLObjectProperty internalObjectProperty=objectProperty.getNamedProperty();
230 AtomicRole role=AtomicRole.create(internalObjectProperty.getIRI().toString());
231 return Atom.create(role,second,first);
232 }
233 else
234 throw new IllegalStateException("Internal error: unsupported type of object property!");
235 }
236
237
238 protected static Role getRole(OWLObjectPropertyExpression objectPropertyExpression) {
239 objectPropertyExpression=objectPropertyExpression.getSimplified();
240 if (objectPropertyExpression instanceof OWLObjectProperty)
241 return AtomicRole.create(((OWLObjectProperty)objectPropertyExpression).getIRI().toString());
242 else if (objectPropertyExpression instanceof OWLObjectInverseOf) {
243 OWLObjectPropertyExpression internal=((OWLObjectInverseOf)objectPropertyExpression).getInverse();
244 if (!(internal instanceof OWLObjectProperty))
245 throw new IllegalStateException("Internal error: invalid normal form.");
246 return AtomicRole.create(((OWLObjectProperty)internal).getIRI().toString()).getInverse();
247 }
248 else
249 throw new IllegalStateException("Internal error: invalid normal form.");
250 }
251
252 protected static Atom getRoleAtom(OWLDataPropertyExpression dataProperty,Term first,Term second) {
253 if (dataProperty instanceof OWLDataProperty) {
254 AtomicRole property=AtomicRole.create(((OWLDataProperty)dataProperty).getIRI().toString());
255 return Atom.create(property,first,second);
256 }
257 else
258 throw new IllegalStateException("Internal error: unsupported type of data property!");
259 }
260 protected static Individual getIndividual(OWLIndividual individual) {
261 if (individual.isAnonymous())
262 return Individual.createAnonymous(individual.asOWLAnonymousIndividual().getID().toString());
263 else
264 return Individual.create(individual.asOWLNamedIndividual().getIRI().toString());
265 }
266
267
268 protected static class NormalizedDatalogAxiomClausifier implements OWLClassExpressionVisitor {
269 protected final List<Atom> m_headAtoms;
270 protected final List<Atom> m_bodyAtoms;
271 protected final List<Atom> m_auxAtoms;
272 protected final Set<Atom> m_positiveFacts;
273 protected final OWLDataFactory m_factory;
274 protected int m_yIndex;
275 protected int m_zIndex;
276
277
278 public NormalizedDatalogAxiomClausifier(Set<Atom> positiveFacts,OWLDataFactory factory) {
279 m_headAtoms=new ArrayList<Atom>();
280 m_bodyAtoms=new ArrayList<Atom>();
281 m_auxAtoms=new ArrayList<Atom>();
282 m_positiveFacts=positiveFacts;
283 m_factory=factory;
284 }
285
286
287
288 protected Set<DLClause> getDLClause() {
289
290 Set<DLClause> clauses = new HashSet<DLClause>();
291 Atom[] headAtoms=new Atom[m_headAtoms.size()];
292 m_headAtoms.toArray(headAtoms);
293 Atom[] bodyAtoms=new Atom[m_bodyAtoms.size()];
294 m_bodyAtoms.toArray(bodyAtoms);
295 clauses.add(DLClause.create(headAtoms,bodyAtoms));
296 if (!m_auxAtoms.isEmpty()) {
297 Atom[] auxAtoms=new Atom[m_auxAtoms.size()];
298 m_auxAtoms.toArray(auxAtoms);
299 clauses.add(DLClause.create(auxAtoms,bodyAtoms));
300 }
301 m_headAtoms.clear();
302 m_bodyAtoms.clear();
303 m_auxAtoms.clear();
304 m_yIndex=0;
305 m_zIndex=0;
306 return clauses;
307 }
308 protected void ensureYNotZero() {
309 if (m_yIndex==0)
310 m_yIndex++;
311 }
312 protected Variable nextY() {
313 Variable result;
314 if (m_yIndex==0)
315 result=Y;
316 else
317 result=Variable.create("?Y"+m_yIndex);
318 m_yIndex++;
319 return result;
320 }
321 protected Variable nextZ() {
322 Variable result;
323 if (m_zIndex==0)
324 result=Z;
325 else
326 result=Variable.create("?Z"+m_zIndex);
327 m_zIndex++;
328 return result;
329 }
330
331
332
333
334 private void existentialRestriction(OWLObjectProperty prop, OWLClassExpression filler) {
335 if (filler.isAnonymous())
336 throw new IllegalStateException("Internal error: invalid normal form.");
337 String propertyID = prop.asOWLObjectProperty().toStringID();
338 String propertyShortID = propertyID.substring(propertyID.indexOf('#')+1);
339 String classID = filler.asOWLClass().toStringID();
340 String classShortID = classID.substring(classID.indexOf('#')+1);
341 Individual auxInd = Individual.create("http://www.cs.ox.ac.uk/KARMA/anonymous#:"+propertyShortID + "-"+classShortID);
342 m_headAtoms.add(Atom.create(AtomicRole.create(propertyID), X, auxInd));
343 m_auxAtoms.add(Atom.create(AtomicConcept.create(classID), auxInd));
344 }
345
346
347 // Various types of descriptions
348
349 public void visit(OWLClass object) {
350
351 m_headAtoms.add(Atom.create(AtomicConcept.create(object.getIRI().toString()),X));
352 }
353
354
355 public void visit(OWLObjectIntersectionOf object) {
356 throw new IllegalStateException("Internal error: invalid normal form.");
357 }
358 public void visit(OWLObjectUnionOf object) {
359 throw new IllegalStateException("Internal error: invalid normal form.");
360 }
361
362
363 public void visit(OWLObjectComplementOf object) {
364 OWLClassExpression description=object.getOperand();
365 if (description instanceof OWLObjectHasSelf) {
366 OWLObjectPropertyExpression objectProperty=((OWLObjectHasSelf)description).getProperty();
367 Atom roleAtom=getRoleAtom(objectProperty,X,X);
368 m_bodyAtoms.add(roleAtom);
369 throw new IllegalStateException("Internal error: invalid normal form.");
370 }
371 else if (description instanceof OWLObjectOneOf && ((OWLObjectOneOf)description).getIndividuals().size()==1) {
372 OWLIndividual individual=((OWLObjectOneOf)description).getIndividuals().iterator().next();
373 m_bodyAtoms.add(Atom.create(Equality.INSTANCE,X, getIndividual(individual)));
374 }
375 else if (!(description instanceof OWLClass))
376 throw new IllegalStateException("Internal error: invalid normal form.");
377 else
378 m_bodyAtoms.add(Atom.create(AtomicConcept.create(((OWLClass)description).getIRI().toString()),X));
379 }
380
381
382
383 public void visit(OWLObjectOneOf object) {
384 for (OWLIndividual individual : object.getIndividuals()) {
385 m_headAtoms.add(Atom.create(Equality.INSTANCE,X,getIndividual(individual)));
386 }
387 }
388
389
390
391
392 public void visit(OWLObjectSomeValuesFrom object) {
393
394 OWLClassExpression filler=object.getFiller();
395 if (filler instanceof OWLObjectOneOf) {
396 for (OWLIndividual individual : ((OWLObjectOneOf)filler).getIndividuals()) {
397 m_headAtoms.add(getRoleAtom(object.getProperty(),X,getIndividual(individual)));
398 }
399 } else {
400 if (filler.isAnonymous())
401 throw new IllegalStateException("Internal error: invalid normal form.");
402 existentialRestriction(object.getProperty().asOWLObjectProperty(), filler);
403 }
404 }
405
406
407 public void visit(OWLObjectAllValuesFrom object) {
408 Variable y=nextY();
409 m_bodyAtoms.add(getRoleAtom(object.getProperty(),X,y));
410 OWLClassExpression filler=object.getFiller();
411
412 if (filler instanceof OWLClass) {
413 AtomicConcept atomicConcept=AtomicConcept.create(((OWLClass)filler).getIRI().toString());
414 if (!atomicConcept.isAlwaysFalse())
415 m_headAtoms.add(Atom.create(atomicConcept,y));
416 }
417 else if (filler instanceof OWLObjectOneOf) {
418 for (OWLIndividual individual : ((OWLObjectOneOf)filler).getIndividuals()) {
419 m_headAtoms.add(Atom.create(Equality.INSTANCE,y,getIndividual(individual)));
420 }
421 }
422 else if (filler instanceof OWLObjectComplementOf) {
423 OWLClassExpression operand=((OWLObjectComplementOf)filler).getOperand();
424 if (operand instanceof OWLClass) {
425 AtomicConcept internalAtomicConcept=AtomicConcept.create(((OWLClass)operand).getIRI().toString());
426 if (!internalAtomicConcept.isAlwaysTrue())
427 m_bodyAtoms.add(Atom.create(internalAtomicConcept,y));
428 }
429 else if (operand instanceof OWLObjectOneOf && ((OWLObjectOneOf)operand).getIndividuals().size()==1) {
430 OWLIndividual individual=((OWLObjectOneOf)operand).getIndividuals().iterator().next();
431 m_bodyAtoms.add(Atom.create(Equality.INSTANCE,y,getIndividual(individual)));
432 }
433 else
434 throw new IllegalStateException("Internal error: invalid normal form.");
435 }
436 else
437 throw new IllegalStateException("Internal error: invalid normal form.");
438 }
439 public void visit(OWLObjectHasValue object) {
440 throw new IllegalStateException("Internal error: invalid normal form.");
441 }
442 public void visit(OWLObjectHasSelf object) {
443 throw new IllegalStateException("Internal error: invalid normal form.");
444 }
445
446 public void visit(OWLObjectMinCardinality object) {
447 if (object.getCardinality() != 1)
448 throw new IllegalStateException("Internal error: invalid normal form.");
449 existentialRestriction(object.getProperty().asOWLObjectProperty(), object.getFiller());
450 }
451 public void visit(OWLObjectMaxCardinality object) {
452 throw new IllegalStateException("Internal error: invalid normal form.");
453// int cardinality=object.getCardinality();
454// OWLObjectPropertyExpression onObjectProperty=object.getProperty();
455// OWLClassExpression filler=object.getFiller();
456// ensureYNotZero();
457// boolean isPositive;
458// AtomicConcept atomicConcept;
459// if (filler instanceof OWLClass) {
460// isPositive=true;
461// atomicConcept=AtomicConcept.create(((OWLClass)filler).getIRI().toString());
462// if (atomicConcept.isAlwaysTrue())
463// atomicConcept=null;
464// }
465// else if (filler instanceof OWLObjectComplementOf) {
466// OWLClassExpression internal=((OWLObjectComplementOf)filler).getOperand();
467// if (!(internal instanceof OWLClass))
468// throw new IllegalStateException("Internal error: Invalid ontology normal form.");
469// isPositive=false;
470// atomicConcept=AtomicConcept.create(((OWLClass)internal).getIRI().toString());
471// if (atomicConcept.isAlwaysFalse())
472// atomicConcept=null;
473// }
474// else
475// throw new IllegalStateException("Internal error: Invalid ontology normal form.");
476// Role onRole=getRole(onObjectProperty);
477// LiteralConcept toConcept=getLiteralConcept(filler);
478// AnnotatedEquality annotatedEquality=AnnotatedEquality.create(cardinality,onRole,toConcept);
479// Variable[] yVars=new Variable[cardinality+1];
480// for (int i=0;i<yVars.length;i++) {
481// yVars[i]=nextY();
482// m_bodyAtoms.add(getRoleAtom(onObjectProperty,X,yVars[i]));
483// if (atomicConcept!=null) {
484// Atom atom=Atom.create(atomicConcept,yVars[i]);
485// if (isPositive)
486// m_bodyAtoms.add(atom);
487// else
488// m_headAtoms.add(atom);
489// }
490// }
491// // Node ID comparisons are not needed in case of functionality axioms,
492// // as the effect of these is simulated by the way in which the rules are applied.
493// if (yVars.length>2) {
494// for (int i=0;i<yVars.length-1;i++)
495// m_bodyAtoms.add(Atom.create(NodeIDLessEqualThan.INSTANCE,yVars[i],yVars[i+1]));
496// m_bodyAtoms.add(Atom.create(NodeIDsAscendingOrEqual.create(yVars.length),yVars));
497// }
498// for (int i=0;i<yVars.length;i++)
499// for (int j=i+1;j<yVars.length;j++)
500// m_headAtoms.add(Atom.create(annotatedEquality,yVars[i],yVars[j],X));
501 }
502 public void visit(OWLObjectExactCardinality object) {
503 throw new IllegalStateException("Internal error: invalid normal form.");
504 }
505 public void visit(OWLDataSomeValuesFrom object) {
506 throw new IllegalStateException("Internal error: invalid normal form.");
507 }
508 public void visit(OWLDataAllValuesFrom object) {
509 throw new IllegalStateException("Internal error: invalid normal form.");
510 }
511 public void visit(OWLDataHasValue object) {
512 throw new IllegalStateException("Internal error: Invalid normal form.");
513 }
514 public void visit(OWLDataMinCardinality object) {
515 throw new IllegalStateException("Internal error: invalid normal form.");
516 }
517 public void visit(OWLDataMaxCardinality object) {
518 throw new IllegalStateException("Internal error: invalid normal form.");
519 }
520 public void visit(OWLDataExactCardinality object) {
521 throw new IllegalStateException("Internal error: invalid normal form.");
522 }
523 }
524
525 protected static class DatalogFactClausifier extends OWLAxiomVisitorAdapter {
526 protected final Set<Atom> m_positiveFacts;
527
528 public DatalogFactClausifier(Set<Atom> positiveFacts) {
529 m_positiveFacts=positiveFacts;
530 }
531 public void visit(OWLSameIndividualAxiom object) {
532 OWLIndividual[] individuals=new OWLIndividual[object.getIndividuals().size()];
533 object.getIndividuals().toArray(individuals);
534 for (int i=0;i<individuals.length-1;i++)
535 m_positiveFacts.add(Atom.create(Equality.create(),getIndividual(individuals[i]),getIndividual(individuals[i+1])));
536 }
537 public void visit(OWLDifferentIndividualsAxiom object) {
538 throw new IllegalStateException("Internal error: invalid normal form.");
539 }
540 public void visit(OWLClassAssertionAxiom object) {
541 OWLClassExpression description=object.getClassExpression();
542 if (description instanceof OWLClass) {
543 AtomicConcept atomicConcept=AtomicConcept.create(((OWLClass)description).getIRI().toString());
544 m_positiveFacts.add(Atom.create(atomicConcept,getIndividual(object.getIndividual())));
545 }
546 else if (description instanceof OWLObjectComplementOf && ((OWLObjectComplementOf)description).getOperand() instanceof OWLClass) {
547 throw new IllegalStateException("Internal error: invalid normal form.");
548 }
549 else if (description instanceof OWLObjectHasSelf) {
550 throw new IllegalStateException("Internal error: invalid normal form.");
551 }
552 else if (description instanceof OWLObjectComplementOf && ((OWLObjectComplementOf)description).getOperand() instanceof OWLObjectHasSelf) {
553 throw new IllegalStateException("Internal error: invalid normal form.");
554 }
555 else
556 throw new IllegalStateException("Internal error: invalid normal form.");
557 }
558 public void visit(OWLObjectPropertyAssertionAxiom object) {
559 m_positiveFacts.add(getRoleAtom(object.getProperty(),getIndividual(object.getSubject()),getIndividual(object.getObject())));
560 }
561 public void visit(OWLNegativeObjectPropertyAssertionAxiom object) {
562 throw new IllegalStateException("Internal error: invalid normal form."); }
563 public void visit(OWLDataPropertyAssertionAxiom object) {
564
565 }
566 public void visit(OWLNegativeDataPropertyAssertionAxiom object) {
567
568 }
569 }
570
571}
572
diff --git a/src/org/semanticweb/karma2/exception/ConstraintException.java b/src/org/semanticweb/karma2/exception/ConstraintException.java
new file mode 100644
index 0000000..d02e1b1
--- /dev/null
+++ b/src/org/semanticweb/karma2/exception/ConstraintException.java
@@ -0,0 +1,17 @@
1package org.semanticweb.karma2.exception;
2
3
4public class ConstraintException extends Exception {
5
6 /**
7 *
8 */
9 private static final long serialVersionUID = -2358667126775918262L;
10
11 public ConstraintException(String string) {
12 super(string);
13 }
14
15
16}
17
diff --git a/src/org/semanticweb/karma2/exception/IllegalInputOntologyException.java b/src/org/semanticweb/karma2/exception/IllegalInputOntologyException.java
new file mode 100644
index 0000000..18df25a
--- /dev/null
+++ b/src/org/semanticweb/karma2/exception/IllegalInputOntologyException.java
@@ -0,0 +1,9 @@
1package org.semanticweb.karma2.exception;
2
3public class IllegalInputOntologyException extends Exception {
4
5 private static final long serialVersionUID = -2352640877361631466L;
6
7	public IllegalInputOntologyException(String msg) { super(msg); }
8
9}
diff --git a/src/org/semanticweb/karma2/exception/IllegalInputQueryException.java b/src/org/semanticweb/karma2/exception/IllegalInputQueryException.java
new file mode 100644
index 0000000..bb94cb7
--- /dev/null
+++ b/src/org/semanticweb/karma2/exception/IllegalInputQueryException.java
@@ -0,0 +1,14 @@
1package org.semanticweb.karma2.exception;
2
3public class IllegalInputQueryException extends Exception {
4
5 /**
6 *
7 */
8 private static final long serialVersionUID = 1L;
9
10 public IllegalInputQueryException(String string) {
11 super(string);
12 }
13
14}
diff --git a/src/org/semanticweb/karma2/exception/QueryExecutionException.java b/src/org/semanticweb/karma2/exception/QueryExecutionException.java
new file mode 100644
index 0000000..bb393ba
--- /dev/null
+++ b/src/org/semanticweb/karma2/exception/QueryExecutionException.java
@@ -0,0 +1,13 @@
1package org.semanticweb.karma2.exception;
2
3import java.util.concurrent.ExecutionException;
4
5public class QueryExecutionException extends ExecutionException {
6
7 private static final long serialVersionUID = 4082514276158055768L;
8
9 public QueryExecutionException(String msg) {
10 super(msg);
11 }
12
13}
diff --git a/src/org/semanticweb/karma2/model/ConjunctiveQuery.java b/src/org/semanticweb/karma2/model/ConjunctiveQuery.java
new file mode 100644
index 0000000..17f3169
--- /dev/null
+++ b/src/org/semanticweb/karma2/model/ConjunctiveQuery.java
@@ -0,0 +1,80 @@
1package org.semanticweb.karma2.model;
2
3import java.io.FileNotFoundException;
4import java.io.IOException;
5import java.util.Map;
6
7import org.semanticweb.HermiT.model.Atom;
8import org.semanticweb.HermiT.model.Term;
9import org.semanticweb.karma2.exception.IllegalInputQueryException;
10import org.semanticweb.karma2.model.cqparser.ConjunctiveQueryParser;
11
12import uk.ac.ox.cs.JRDFox.Prefixes;
13
14
15public class ConjunctiveQuery {
16
17
18 protected final Atom[] m_queryAtoms;
19 protected final Term[] m_answerTerms;
20 protected final Term[] m_resultBuffer;
21 protected Prefixes prefixes;
22
23 public ConjunctiveQuery(Atom[] queryAtoms,Term[] answerTerms) {
24 m_queryAtoms=queryAtoms;
25 m_answerTerms=answerTerms;
26 m_resultBuffer=answerTerms.clone();
27 prefixes = new Prefixes();
28 }
29
30
31 public Prefixes getPrefixes() {
32 return this.prefixes;
33 }
34
35 public ConjunctiveQuery(Atom[] queryAtoms,Term[] answerTerms, Prefixes prefixes) {
36 m_queryAtoms=queryAtoms;
37 m_answerTerms=answerTerms;
38 m_resultBuffer=answerTerms.clone();
39 this.prefixes = prefixes;
40 }
41
42 public int getNumberOfQueryAtoms() {
43 return m_queryAtoms.length;
44 }
45 public Atom getQueryAtom(int atomIndex) {
46 return m_queryAtoms[atomIndex];
47 }
48 public int getNumberOfAnswerTerms() {
49 return m_answerTerms.length;
50 }
51 public Term getAnswerTerm(int termIndex) {
52 return m_answerTerms[termIndex];
53 }
54
55
56 public String toString() {
57 String res = "";
58 Map<String, String> iriMap = prefixes.getPrefixIRIsByPrefixName();
59 for (String shortIri: iriMap.keySet())
60 res += "prefix " + shortIri + " <" + iriMap.get(shortIri) + ">\n";
61 res += "prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n";
62 res += "select ";
63 for (Term t: m_answerTerms)
64 res+= " " + t + " ";
65 res += " WHERE { ";
66 for (Atom a : m_queryAtoms){
67 if (a.getArity() == 1)
68 res+= a.getArgument(0) + " rdf:type " + a.getDLPredicate().toString().replace('<', ' ').replace('>', ' ') + " . ";
69 if (a.getArity() == 2)
70 res+= a.getArgument(0) + " " + a.getDLPredicate().toString().replace('<', ' ').replace('>', ' ') + " " + a.getArgument(1) + " . ";
71 }
72 return res + "}";
73
74 }
75
76
77 public static ConjunctiveQuery parse(String query) throws FileNotFoundException, IllegalInputQueryException, IOException {
78 return (new ConjunctiveQueryParser(query)).parse();
79 }
80}
\ No newline at end of file
diff --git a/src/org/semanticweb/karma2/model/Equality.java b/src/org/semanticweb/karma2/model/Equality.java
new file mode 100644
index 0000000..fcb270c
--- /dev/null
+++ b/src/org/semanticweb/karma2/model/Equality.java
@@ -0,0 +1,36 @@
1package org.semanticweb.karma2.model;
2
3import java.io.Serializable;
4
5import org.semanticweb.HermiT.Prefixes;
6import org.semanticweb.HermiT.model.DLPredicate;
7
8/**
9 * Represents the equality predicate.
10 */
11public class Equality implements DLPredicate,Serializable {
12 private static final long serialVersionUID=8308051741088513244L;
13
14 public static final Equality INSTANCE=new Equality();
15
16 protected Equality () {
17 }
18 public int getArity() {
19 return 2;
20 }
21 public String toString(Prefixes prefixes) {
22 return "<http://www.w3.org/2002/07/owl#sameas>";
23 }
24 public String toOrderedString(Prefixes prefixes) {
25 return toString(prefixes);
26 }
27 public String toString() {
28 return toString(Prefixes.STANDARD_PREFIXES);
29 }
30 protected Object readResolve() {
31 return INSTANCE;
32 }
33 public static Equality create() {
34 return INSTANCE;
35 }
36}
diff --git a/src/org/semanticweb/karma2/model/ExtendedConjunctiveQuery.java b/src/org/semanticweb/karma2/model/ExtendedConjunctiveQuery.java
new file mode 100644
index 0000000..de2f87e
--- /dev/null
+++ b/src/org/semanticweb/karma2/model/ExtendedConjunctiveQuery.java
@@ -0,0 +1,94 @@
1package org.semanticweb.karma2.model;
2
3import java.util.LinkedHashSet;
4import java.util.LinkedList;
5import java.util.List;
6import java.util.Set;
7
8import org.semanticweb.HermiT.model.Atom;
9import org.semanticweb.HermiT.model.Term;
10import org.semanticweb.HermiT.model.Variable;
11
12import uk.ac.ox.cs.JRDFox.Prefixes;
13
14
15public class ExtendedConjunctiveQuery extends ConjunctiveQuery {
16
17 private Term[] ansTerms;
18 private Term[] terms;
19
20 public ExtendedConjunctiveQuery(
21 Atom[] queryAtoms, Term[] answerTerms, Prefixes pref) {
22 super(queryAtoms, getExtendedHead(queryAtoms, answerTerms), pref);
23 this.ansTerms = answerTerms.clone();
24 terms = getQueryTerms(queryAtoms);
25 }
26
27 public int getNumberOfRealAnswerTerms() {
28 return ansTerms.length;
29 }
30
31 public Term getRealAnswerTerm(int termIndex) {
32 return ansTerms[termIndex];
33 }
34
35 public int getNumberOfTerms() {
36 return terms.length;
37 }
38
39 public Term[] getTerms() {
40 return terms;
41 }
42
43
44 public Atom[] getAtoms() {
45 return m_queryAtoms;
46 }
47
48 public Term[] getRealAnswerTerms() {
49 return ansTerms;
50 }
51
52 private static Term[] getExtendedHead(Atom[] queryAtoms, Term[] answerTerms) {
53 List<Term> terms = new LinkedList<Term>();
54 for (Term t :answerTerms) {
55 terms.add(t);
56 }
57 for (Atom a : queryAtoms) {
58 if (a.getArgument(0) instanceof Variable && !terms.contains(a.getArgument(0)))
59 terms.add(a.getArgument(0));
60 if (a.getArity()> 1 && a.getArgument(1) instanceof Variable && !terms.contains(a.getArgument(1)))
61 terms.add(a.getArgument(1));
62 }
63 return terms.toArray(new Term[terms.size()]);
64
65 }
66
67 private static Term[] getQueryTerms(Atom[] queryAtoms) {
68 Set<Term> terms = new LinkedHashSet<Term>();
69 for (Atom a : queryAtoms) {
70 terms.add(a.getArgument(0));
71 if (a.getArity()> 1)
72 terms.add(a.getArgument(1));
73 }
74 return terms.toArray(new Term[terms.size()]);
75 }
76
77
78 public static ExtendedConjunctiveQuery computeExtension(ConjunctiveQuery q) {
79 Term[] answerTerms = new Term[q.getNumberOfAnswerTerms()];
80 for (int i = 0; i < q.getNumberOfAnswerTerms(); i++)
81 answerTerms[i] = q.getAnswerTerm(i);
82 Atom[] atoms = new Atom[q.getNumberOfQueryAtoms()];
83 for (int i = 0; i < q.getNumberOfQueryAtoms(); i++)
84 atoms[i] = q.getQueryAtom(i);
85 return new ExtendedConjunctiveQuery(atoms, answerTerms,q.prefixes);
86
87 }
88
89 public Term getTerm(int i) {
90 return terms[i];
91 }
92
93
94}
diff --git a/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g b/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g
new file mode 100644
index 0000000..621b0ce
--- /dev/null
+++ b/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g
@@ -0,0 +1,140 @@
1grammar ConjunctiveQuery;
2
3options {
4 language = Java;
5 output = AST;
6}
7
8tokens {
9 VARIABLE;
10 CONSTANT;
11 SCONSTANT;
12 ATOM;
13 HEADATOM;
14 PREDICATE;
15 ATOM_LIST;
16 TERM_LIST;
17 RULE;
18 EXPRESSION;
19 PREFIX_LIST;
20 ID;
21 PREFIX;
22 PREDICATE;
23}
24
25@header {
26package org.semanticweb.karma2.model.cqparser;
27
28import java.io.File;
29import java.io.FileInputStream;
30import java.io.InputStream;
31import java.io.FileNotFoundException;
32import java.io.IOException;
33import java.util.Set;
34
35import org.semanticweb.karma2.model.ConjunctiveQuery;
36
37
38import org.semanticweb.karma2.model.cqparser.ConjunctiveQueryWalker;
39import org.semanticweb.karma2.exception.IllegalInputQueryException;
40
41
42
43}
44
45@members{
46
47
48
49 public ConjunctiveQueryParser(String string)
50 throws FileNotFoundException, IOException {
51 this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRStringStream(string))));
52 }
53
54 public ConjunctiveQueryParser(InputStream istream) throws FileNotFoundException, IOException {
55 this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRInputStream(istream))));
56
57 }
58
59
60 public ConjunctiveQueryParser(File file) throws FileNotFoundException, IOException {
61 this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRInputStream(new FileInputStream(file)))));
62
63 }
64
65 public ConjunctiveQuery parse() throws IllegalInputQueryException {
66 cq_return r = null;
67 try {
68 r = cq();
69 } catch (RecognitionException e) {
70 e.printStackTrace();
71 }
72 CommonTree t = (CommonTree) r.getTree();
73
74 //System.out.println(t.toStringTree());
75 CommonTreeNodeStream nodes = new CommonTreeNodeStream(t);
76 // AST nodes have payloads that point into token stream
77 nodes.setTokenStream(input);
78
79
80 ConjunctiveQueryWalker walker = new ConjunctiveQueryWalker();
81
82 ConjunctiveQuery cq = walker.walkExpressionNode(t);
83 return cq;
84 }
85
86 public ConjunctiveQuery parseCQ() throws IllegalInputQueryException {
87 return parse();
88 }
89
90}
91
92
93@lexer::header{
94package org.semanticweb.karma2.model.cqparser;
95}
96
97cq :
98 prefixlist rulebody -> ^(EXPRESSION prefixlist rulebody );
99
100prefixlist:
101 prefix (',' prefix)* -> ^(PREFIX_LIST prefix*);
102
103prefix:
104 'prefix' id ':' '<' url '>' -> ^(PREFIX id url);
105
106
107rulebody:
108 headatom ('<-'|':') body '.'? -> ^(RULE headatom body);
109
110body:
111 atom (',' atom)* -> ^(ATOM_LIST atom*);
112
113
114headatom:
115 id '(' term (',' term)* ')' -> ^(HEADATOM term*);
116
117atom:
118 compositeid '(' term (',' term)* ')' -> ^(ATOM compositeid term*);
119
120compositeid:
121 (id) ':' (id) -> ^(ID id id);
122
123
124term:
125 variable -> ^(VARIABLE variable)
126 | simpleid -> ^(SCONSTANT simpleid)
127 | compositeid -> ^(CONSTANT compositeid);
128
129id : (STRING);
130simpleid : '<' URLSTRING '>' | '<' STRING '>';
131
132// TODO: FIXIT X1 can be parsed as variable
133variable:
134 ('?') id -> ^(id);
135
136 url : (URLSTRING);
137
138URLSTRING : ('http://'|'file:/') ('a'..'z'|'A'..'Z'|'0'..'9'|'/'|'#'|'.'|'-'|'~'|'_')+;
139STRING : ('a'..'z'|'A'..'Z'|'0'..'9'|'/'|'#'|'.'|'-'|'_')+;
140WS : (' '|'\n'|'\r')+ {$channel=HIDDEN;} ;
diff --git a/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.tokens b/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.tokens
new file mode 100644
index 0000000..3b4fa39
--- /dev/null
+++ b/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.tokens
@@ -0,0 +1,36 @@
1T__20=20
2T__21=21
3T__22=22
4T__23=23
5T__24=24
6T__25=25
7T__26=26
8T__27=27
9T__28=28
10T__29=29
11ATOM=4
12ATOM_LIST=5
13CONSTANT=6
14EXPRESSION=7
15HEADATOM=8
16ID=9
17PREDICATE=10
18PREFIX=11
19PREFIX_LIST=12
20RULE=13
21SCONSTANT=14
22STRING=15
23TERM_LIST=16
24URLSTRING=17
25VARIABLE=18
26WS=19
27'('=20
28')'=21
29','=22
30'.'=23
31':'=24
32'<'=25
33'<-'=26
34'>'=27
35'?'=28
36'prefix'=29
diff --git a/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryLexer.java b/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryLexer.java
new file mode 100644
index 0000000..a97d7a4
--- /dev/null
+++ b/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryLexer.java
@@ -0,0 +1,814 @@
1// $ANTLR 3.5 /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g 2013-12-13 14:41:34
2
3package org.semanticweb.karma2.model.cqparser;
4
5
6import org.antlr.runtime.*;
7import java.util.Stack;
8import java.util.List;
9import java.util.ArrayList;
10
11@SuppressWarnings("all")
12public class ConjunctiveQueryLexer extends Lexer {
13 public static final int EOF=-1;
14 public static final int T__20=20;
15 public static final int T__21=21;
16 public static final int T__22=22;
17 public static final int T__23=23;
18 public static final int T__24=24;
19 public static final int T__25=25;
20 public static final int T__26=26;
21 public static final int T__27=27;
22 public static final int T__28=28;
23 public static final int T__29=29;
24 public static final int ATOM=4;
25 public static final int ATOM_LIST=5;
26 public static final int CONSTANT=6;
27 public static final int EXPRESSION=7;
28 public static final int HEADATOM=8;
29 public static final int ID=9;
30 public static final int PREDICATE=10;
31 public static final int PREFIX=11;
32 public static final int PREFIX_LIST=12;
33 public static final int RULE=13;
34 public static final int SCONSTANT=14;
35 public static final int STRING=15;
36 public static final int TERM_LIST=16;
37 public static final int URLSTRING=17;
38 public static final int VARIABLE=18;
39 public static final int WS=19;
40
41 // delegates
42 // delegators
43 public Lexer[] getDelegates() {
44 return new Lexer[] {};
45 }
46
47 public ConjunctiveQueryLexer() {}
48 public ConjunctiveQueryLexer(CharStream input) {
49 this(input, new RecognizerSharedState());
50 }
51 public ConjunctiveQueryLexer(CharStream input, RecognizerSharedState state) {
52 super(input,state);
53 }
54 @Override public String getGrammarFileName() { return "/home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g"; }
55
56 // $ANTLR start "T__20"
57 public final void mT__20() throws RecognitionException {
58 try {
59 int _type = T__20;
60 int _channel = DEFAULT_TOKEN_CHANNEL;
61 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:11:7: ( '(' )
62 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:11:9: '('
63 {
64 match('(');
65 }
66
67 state.type = _type;
68 state.channel = _channel;
69 }
70 finally {
71 // do for sure before leaving
72 }
73 }
74 // $ANTLR end "T__20"
75
76 // $ANTLR start "T__21"
77 public final void mT__21() throws RecognitionException {
78 try {
79 int _type = T__21;
80 int _channel = DEFAULT_TOKEN_CHANNEL;
81 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:12:7: ( ')' )
82 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:12:9: ')'
83 {
84 match(')');
85 }
86
87 state.type = _type;
88 state.channel = _channel;
89 }
90 finally {
91 // do for sure before leaving
92 }
93 }
94 // $ANTLR end "T__21"
95
96 // $ANTLR start "T__22"
97 public final void mT__22() throws RecognitionException {
98 try {
99 int _type = T__22;
100 int _channel = DEFAULT_TOKEN_CHANNEL;
101 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:13:7: ( ',' )
102 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:13:9: ','
103 {
104 match(',');
105 }
106
107 state.type = _type;
108 state.channel = _channel;
109 }
110 finally {
111 // do for sure before leaving
112 }
113 }
114 // $ANTLR end "T__22"
115
116 // $ANTLR start "T__23"
117 public final void mT__23() throws RecognitionException {
118 try {
119 int _type = T__23;
120 int _channel = DEFAULT_TOKEN_CHANNEL;
121 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:14:7: ( '.' )
122 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:14:9: '.'
123 {
124 match('.');
125 }
126
127 state.type = _type;
128 state.channel = _channel;
129 }
130 finally {
131 // do for sure before leaving
132 }
133 }
134 // $ANTLR end "T__23"
135
136 // $ANTLR start "T__24"
137 public final void mT__24() throws RecognitionException {
138 try {
139 int _type = T__24;
140 int _channel = DEFAULT_TOKEN_CHANNEL;
141 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:15:7: ( ':' )
142 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:15:9: ':'
143 {
144 match(':');
145 }
146
147 state.type = _type;
148 state.channel = _channel;
149 }
150 finally {
151 // do for sure before leaving
152 }
153 }
154 // $ANTLR end "T__24"
155
156 // $ANTLR start "T__25"
157 public final void mT__25() throws RecognitionException {
158 try {
159 int _type = T__25;
160 int _channel = DEFAULT_TOKEN_CHANNEL;
161 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:16:7: ( '<' )
162 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:16:9: '<'
163 {
164 match('<');
165 }
166
167 state.type = _type;
168 state.channel = _channel;
169 }
170 finally {
171 // do for sure before leaving
172 }
173 }
174 // $ANTLR end "T__25"
175
176 // $ANTLR start "T__26"
177 public final void mT__26() throws RecognitionException {
178 try {
179 int _type = T__26;
180 int _channel = DEFAULT_TOKEN_CHANNEL;
181 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:17:7: ( '<-' )
182 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:17:9: '<-'
183 {
184 match("<-");
185
186 }
187
188 state.type = _type;
189 state.channel = _channel;
190 }
191 finally {
192 // do for sure before leaving
193 }
194 }
195 // $ANTLR end "T__26"
196
197 // $ANTLR start "T__27"
198 public final void mT__27() throws RecognitionException {
199 try {
200 int _type = T__27;
201 int _channel = DEFAULT_TOKEN_CHANNEL;
202 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:18:7: ( '>' )
203 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:18:9: '>'
204 {
205 match('>');
206 }
207
208 state.type = _type;
209 state.channel = _channel;
210 }
211 finally {
212 // do for sure before leaving
213 }
214 }
215 // $ANTLR end "T__27"
216
217 // $ANTLR start "T__28"
218 public final void mT__28() throws RecognitionException {
219 try {
220 int _type = T__28;
221 int _channel = DEFAULT_TOKEN_CHANNEL;
222 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:19:7: ( '?' )
223 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:19:9: '?'
224 {
225 match('?');
226 }
227
228 state.type = _type;
229 state.channel = _channel;
230 }
231 finally {
232 // do for sure before leaving
233 }
234 }
235 // $ANTLR end "T__28"
236
237 // $ANTLR start "T__29"
238 public final void mT__29() throws RecognitionException {
239 try {
240 int _type = T__29;
241 int _channel = DEFAULT_TOKEN_CHANNEL;
242 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:20:7: ( 'prefix' )
243 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:20:9: 'prefix'
244 {
245 match("prefix");
246
247 }
248
249 state.type = _type;
250 state.channel = _channel;
251 }
252 finally {
253 // do for sure before leaving
254 }
255 }
256 // $ANTLR end "T__29"
257
258 // $ANTLR start "URLSTRING"
259 public final void mURLSTRING() throws RecognitionException {
260 try {
261 int _type = URLSTRING;
262 int _channel = DEFAULT_TOKEN_CHANNEL;
263 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:11: ( ( 'http://' | 'file:/' ) ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' | '-' | '~' | '_' )+ )
264 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:13: ( 'http://' | 'file:/' ) ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' | '-' | '~' | '_' )+
265 {
266 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:13: ( 'http://' | 'file:/' )
267 int alt1=2;
268 int LA1_0 = input.LA(1);
269 if ( (LA1_0=='h') ) {
270 alt1=1;
271 }
272 else if ( (LA1_0=='f') ) {
273 alt1=2;
274 }
275
276 else {
277 NoViableAltException nvae =
278 new NoViableAltException("", 1, 0, input);
279 throw nvae;
280 }
281
282 switch (alt1) {
283 case 1 :
284 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:14: 'http://'
285 {
286 match("http://");
287
288 }
289 break;
290 case 2 :
291 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:24: 'file:/'
292 {
293 match("file:/");
294
295 }
296 break;
297
298 }
299
300 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:34: ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' | '-' | '~' | '_' )+
301 int cnt2=0;
302 loop2:
303 while (true) {
304 int alt2=2;
305 int LA2_0 = input.LA(1);
306 if ( (LA2_0=='#'||(LA2_0 >= '-' && LA2_0 <= '9')||(LA2_0 >= 'A' && LA2_0 <= 'Z')||LA2_0=='_'||(LA2_0 >= 'a' && LA2_0 <= 'z')||LA2_0=='~') ) {
307 alt2=1;
308 }
309
310 switch (alt2) {
311 case 1 :
312 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:
313 {
314 if ( input.LA(1)=='#'||(input.LA(1) >= '-' && input.LA(1) <= '9')||(input.LA(1) >= 'A' && input.LA(1) <= 'Z')||input.LA(1)=='_'||(input.LA(1) >= 'a' && input.LA(1) <= 'z')||input.LA(1)=='~' ) {
315 input.consume();
316 }
317 else {
318 MismatchedSetException mse = new MismatchedSetException(null,input);
319 recover(mse);
320 throw mse;
321 }
322 }
323 break;
324
325 default :
326 if ( cnt2 >= 1 ) break loop2;
327 EarlyExitException eee = new EarlyExitException(2, input);
328 throw eee;
329 }
330 cnt2++;
331 }
332
333 }
334
335 state.type = _type;
336 state.channel = _channel;
337 }
338 finally {
339 // do for sure before leaving
340 }
341 }
342 // $ANTLR end "URLSTRING"
343
344 // $ANTLR start "STRING"
345 public final void mSTRING() throws RecognitionException {
346 try {
347 int _type = STRING;
348 int _channel = DEFAULT_TOKEN_CHANNEL;
349 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:139:9: ( ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' | '-' | '_' )+ )
350 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:139:13: ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' | '-' | '_' )+
351 {
352 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:139:13: ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' | '-' | '_' )+
353 int cnt3=0;
354 loop3:
355 while (true) {
356 int alt3=2;
357 int LA3_0 = input.LA(1);
358 if ( (LA3_0=='#'||(LA3_0 >= '-' && LA3_0 <= '9')||(LA3_0 >= 'A' && LA3_0 <= 'Z')||LA3_0=='_'||(LA3_0 >= 'a' && LA3_0 <= 'z')) ) {
359 alt3=1;
360 }
361
362 switch (alt3) {
363 case 1 :
364 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:
365 {
366 if ( input.LA(1)=='#'||(input.LA(1) >= '-' && input.LA(1) <= '9')||(input.LA(1) >= 'A' && input.LA(1) <= 'Z')||input.LA(1)=='_'||(input.LA(1) >= 'a' && input.LA(1) <= 'z') ) {
367 input.consume();
368 }
369 else {
370 MismatchedSetException mse = new MismatchedSetException(null,input);
371 recover(mse);
372 throw mse;
373 }
374 }
375 break;
376
377 default :
378 if ( cnt3 >= 1 ) break loop3;
379 EarlyExitException eee = new EarlyExitException(3, input);
380 throw eee;
381 }
382 cnt3++;
383 }
384
385 }
386
387 state.type = _type;
388 state.channel = _channel;
389 }
390 finally {
391 // do for sure before leaving
392 }
393 }
394 // $ANTLR end "STRING"
395
396 // $ANTLR start "WS"
397 public final void mWS() throws RecognitionException {
398 try {
399 int _type = WS;
400 int _channel = DEFAULT_TOKEN_CHANNEL;
401 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:140:5: ( ( ' ' | '\\n' | '\\r' )+ )
402 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:140:7: ( ' ' | '\\n' | '\\r' )+
403 {
404 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:140:7: ( ' ' | '\\n' | '\\r' )+
405 int cnt4=0;
406 loop4:
407 while (true) {
408 int alt4=2;
409 int LA4_0 = input.LA(1);
410 if ( (LA4_0=='\n'||LA4_0=='\r'||LA4_0==' ') ) {
411 alt4=1;
412 }
413
414 switch (alt4) {
415 case 1 :
416 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:
417 {
418 if ( input.LA(1)=='\n'||input.LA(1)=='\r'||input.LA(1)==' ' ) {
419 input.consume();
420 }
421 else {
422 MismatchedSetException mse = new MismatchedSetException(null,input);
423 recover(mse);
424 throw mse;
425 }
426 }
427 break;
428
429 default :
430 if ( cnt4 >= 1 ) break loop4;
431 EarlyExitException eee = new EarlyExitException(4, input);
432 throw eee;
433 }
434 cnt4++;
435 }
436
437 _channel=HIDDEN;
438 }
439
440 state.type = _type;
441 state.channel = _channel;
442 }
443 finally {
444 // do for sure before leaving
445 }
446 }
447 // $ANTLR end "WS"
448
449 @Override
450 public void mTokens() throws RecognitionException {
451 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:8: ( T__20 | T__21 | T__22 | T__23 | T__24 | T__25 | T__26 | T__27 | T__28 | T__29 | URLSTRING | STRING | WS )
452 int alt5=13;
453 switch ( input.LA(1) ) {
454 case '(':
455 {
456 alt5=1;
457 }
458 break;
459 case ')':
460 {
461 alt5=2;
462 }
463 break;
464 case ',':
465 {
466 alt5=3;
467 }
468 break;
469 case '.':
470 {
471 int LA5_4 = input.LA(2);
472 if ( (LA5_4=='#'||(LA5_4 >= '-' && LA5_4 <= '9')||(LA5_4 >= 'A' && LA5_4 <= 'Z')||LA5_4=='_'||(LA5_4 >= 'a' && LA5_4 <= 'z')) ) {
473 alt5=12;
474 }
475
476 else {
477 alt5=4;
478 }
479
480 }
481 break;
482 case ':':
483 {
484 alt5=5;
485 }
486 break;
487 case '<':
488 {
489 int LA5_6 = input.LA(2);
490 if ( (LA5_6=='-') ) {
491 alt5=7;
492 }
493
494 else {
495 alt5=6;
496 }
497
498 }
499 break;
500 case '>':
501 {
502 alt5=8;
503 }
504 break;
505 case '?':
506 {
507 alt5=9;
508 }
509 break;
510 case 'p':
511 {
512 int LA5_9 = input.LA(2);
513 if ( (LA5_9=='r') ) {
514 int LA5_17 = input.LA(3);
515 if ( (LA5_17=='e') ) {
516 int LA5_20 = input.LA(4);
517 if ( (LA5_20=='f') ) {
518 int LA5_23 = input.LA(5);
519 if ( (LA5_23=='i') ) {
520 int LA5_26 = input.LA(6);
521 if ( (LA5_26=='x') ) {
522 int LA5_28 = input.LA(7);
523 if ( (LA5_28=='#'||(LA5_28 >= '-' && LA5_28 <= '9')||(LA5_28 >= 'A' && LA5_28 <= 'Z')||LA5_28=='_'||(LA5_28 >= 'a' && LA5_28 <= 'z')) ) {
524 alt5=12;
525 }
526
527 else {
528 alt5=10;
529 }
530
531 }
532
533 else {
534 alt5=12;
535 }
536
537 }
538
539 else {
540 alt5=12;
541 }
542
543 }
544
545 else {
546 alt5=12;
547 }
548
549 }
550
551 else {
552 alt5=12;
553 }
554
555 }
556
557 else {
558 alt5=12;
559 }
560
561 }
562 break;
563 case 'h':
564 {
565 int LA5_10 = input.LA(2);
566 if ( (LA5_10=='t') ) {
567 int LA5_18 = input.LA(3);
568 if ( (LA5_18=='t') ) {
569 int LA5_21 = input.LA(4);
570 if ( (LA5_21=='p') ) {
571 int LA5_24 = input.LA(5);
572 if ( (LA5_24==':') ) {
573 alt5=11;
574 }
575
576 else {
577 alt5=12;
578 }
579
580 }
581
582 else {
583 alt5=12;
584 }
585
586 }
587
588 else {
589 alt5=12;
590 }
591
592 }
593
594 else {
595 alt5=12;
596 }
597
598 }
599 break;
600 case 'f':
601 {
602 int LA5_11 = input.LA(2);
603 if ( (LA5_11=='i') ) {
604 int LA5_19 = input.LA(3);
605 if ( (LA5_19=='l') ) {
606 int LA5_22 = input.LA(4);
607 if ( (LA5_22=='e') ) {
608 int LA5_25 = input.LA(5);
609 if ( (LA5_25==':') ) {
610 alt5=11;
611 }
612
613 else {
614 alt5=12;
615 }
616
617 }
618
619 else {
620 alt5=12;
621 }
622
623 }
624
625 else {
626 alt5=12;
627 }
628
629 }
630
631 else {
632 alt5=12;
633 }
634
635 }
636 break;
637 case '#':
638 case '-':
639 case '/':
640 case '0':
641 case '1':
642 case '2':
643 case '3':
644 case '4':
645 case '5':
646 case '6':
647 case '7':
648 case '8':
649 case '9':
650 case 'A':
651 case 'B':
652 case 'C':
653 case 'D':
654 case 'E':
655 case 'F':
656 case 'G':
657 case 'H':
658 case 'I':
659 case 'J':
660 case 'K':
661 case 'L':
662 case 'M':
663 case 'N':
664 case 'O':
665 case 'P':
666 case 'Q':
667 case 'R':
668 case 'S':
669 case 'T':
670 case 'U':
671 case 'V':
672 case 'W':
673 case 'X':
674 case 'Y':
675 case 'Z':
676 case '_':
677 case 'a':
678 case 'b':
679 case 'c':
680 case 'd':
681 case 'e':
682 case 'g':
683 case 'i':
684 case 'j':
685 case 'k':
686 case 'l':
687 case 'm':
688 case 'n':
689 case 'o':
690 case 'q':
691 case 'r':
692 case 's':
693 case 't':
694 case 'u':
695 case 'v':
696 case 'w':
697 case 'x':
698 case 'y':
699 case 'z':
700 {
701 alt5=12;
702 }
703 break;
704 case '\n':
705 case '\r':
706 case ' ':
707 {
708 alt5=13;
709 }
710 break;
711 default:
712 NoViableAltException nvae =
713 new NoViableAltException("", 5, 0, input);
714 throw nvae;
715 }
716 switch (alt5) {
717 case 1 :
718 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:10: T__20
719 {
720 mT__20();
721
722 }
723 break;
724 case 2 :
725 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:16: T__21
726 {
727 mT__21();
728
729 }
730 break;
731 case 3 :
732 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:22: T__22
733 {
734 mT__22();
735
736 }
737 break;
738 case 4 :
739 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:28: T__23
740 {
741 mT__23();
742
743 }
744 break;
745 case 5 :
746 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:34: T__24
747 {
748 mT__24();
749
750 }
751 break;
752 case 6 :
753 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:40: T__25
754 {
755 mT__25();
756
757 }
758 break;
759 case 7 :
760 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:46: T__26
761 {
762 mT__26();
763
764 }
765 break;
766 case 8 :
767 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:52: T__27
768 {
769 mT__27();
770
771 }
772 break;
773 case 9 :
774 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:58: T__28
775 {
776 mT__28();
777
778 }
779 break;
780 case 10 :
781 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:64: T__29
782 {
783 mT__29();
784
785 }
786 break;
787 case 11 :
788 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:70: URLSTRING
789 {
790 mURLSTRING();
791
792 }
793 break;
794 case 12 :
795 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:80: STRING
796 {
797 mSTRING();
798
799 }
800 break;
801 case 13 :
802 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:87: WS
803 {
804 mWS();
805
806 }
807 break;
808
809 }
810 }
811
812
813
814}
diff --git a/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryParser.java b/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryParser.java
new file mode 100644
index 0000000..b934e30
--- /dev/null
+++ b/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryParser.java
@@ -0,0 +1,1611 @@
1// $ANTLR 3.5 /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g 2013-12-13 14:41:34
2
3package org.semanticweb.karma2.model.cqparser;
4
5import java.io.File;
6import java.io.FileInputStream;
7import java.io.InputStream;
8import java.io.FileNotFoundException;
9import java.io.IOException;
10import java.util.Set;
11
12import org.semanticweb.karma2.model.ConjunctiveQuery;
13
14
15import org.semanticweb.karma2.model.cqparser.ConjunctiveQueryWalker;
16import org.semanticweb.karma2.exception.IllegalInputQueryException;
17
18
19
20
21
22import org.antlr.runtime.*;
23import java.util.Stack;
24import java.util.List;
25import java.util.ArrayList;
26
27import org.antlr.runtime.tree.*;
28
29
30@SuppressWarnings("all")
31public class ConjunctiveQueryParser extends Parser {
32 public static final String[] tokenNames = new String[] {
33 "<invalid>", "<EOR>", "<DOWN>", "<UP>", "ATOM", "ATOM_LIST", "CONSTANT",
34 "EXPRESSION", "HEADATOM", "ID", "PREDICATE", "PREFIX", "PREFIX_LIST",
35 "RULE", "SCONSTANT", "STRING", "TERM_LIST", "URLSTRING", "VARIABLE", "WS",
36 "'('", "')'", "','", "'.'", "':'", "'<'", "'<-'", "'>'", "'?'", "'prefix'"
37 };
38 public static final int EOF=-1;
39 public static final int T__20=20;
40 public static final int T__21=21;
41 public static final int T__22=22;
42 public static final int T__23=23;
43 public static final int T__24=24;
44 public static final int T__25=25;
45 public static final int T__26=26;
46 public static final int T__27=27;
47 public static final int T__28=28;
48 public static final int T__29=29;
49 public static final int ATOM=4;
50 public static final int ATOM_LIST=5;
51 public static final int CONSTANT=6;
52 public static final int EXPRESSION=7;
53 public static final int HEADATOM=8;
54 public static final int ID=9;
55 public static final int PREDICATE=10;
56 public static final int PREFIX=11;
57 public static final int PREFIX_LIST=12;
58 public static final int RULE=13;
59 public static final int SCONSTANT=14;
60 public static final int STRING=15;
61 public static final int TERM_LIST=16;
62 public static final int URLSTRING=17;
63 public static final int VARIABLE=18;
64 public static final int WS=19;
65
66 // delegates
67 public Parser[] getDelegates() {
68 return new Parser[] {};
69 }
70
71 // delegators
72
73
74 public ConjunctiveQueryParser(TokenStream input) {
75 this(input, new RecognizerSharedState());
76 }
77 public ConjunctiveQueryParser(TokenStream input, RecognizerSharedState state) {
78 super(input, state);
79 }
80
81 protected TreeAdaptor adaptor = new CommonTreeAdaptor();
82
83 public void setTreeAdaptor(TreeAdaptor adaptor) {
84 this.adaptor = adaptor;
85 }
86 public TreeAdaptor getTreeAdaptor() {
87 return adaptor;
88 }
89 @Override public String[] getTokenNames() { return ConjunctiveQueryParser.tokenNames; }
90 @Override public String getGrammarFileName() { return "/home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g"; }
91
92
93
94
95
96 public ConjunctiveQueryParser(String string)
97 throws FileNotFoundException, IOException {
98 this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRStringStream(string))));
99 }
100
101 public ConjunctiveQueryParser(InputStream istream) throws FileNotFoundException, IOException {
102 this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRInputStream(istream))));
103
104 }
105
106
107 public ConjunctiveQueryParser(File file) throws FileNotFoundException, IOException {
108 this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRInputStream(new FileInputStream(file)))));
109
110 }
111
112 public ConjunctiveQuery parse() throws IllegalInputQueryException {
113 cq_return r = null;
114 try {
115 r = cq();
116 } catch (RecognitionException e) {
117 throw new RuntimeException("Unable to parse conjunctive query", e); // otherwise r stays null and r.getTree() below throws a NullPointerException
118 }
119 CommonTree t = (CommonTree) r.getTree();
120
121 CommonTreeNodeStream nodes = new CommonTreeNodeStream(t);
122 // AST nodes have payloads that point into token stream
123 nodes.setTokenStream(input);
124
125
126 ConjunctiveQueryWalker walker = new ConjunctiveQueryWalker();
127
128 ConjunctiveQuery cq = walker.walkExpressionNode(t);
129 return cq;
130 }
131
132 public ConjunctiveQuery parseCQ() throws IllegalInputQueryException {
133 return parse();
134 }
135
136
137
138 public static class cq_return extends ParserRuleReturnScope {
139 Object tree;
140 @Override
141 public Object getTree() { return tree; }
142 };
143
144
145 // $ANTLR start "cq"
146 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:97:1: cq : prefixlist rulebody -> ^( EXPRESSION prefixlist rulebody ) ;
147 public final ConjunctiveQueryParser.cq_return cq() throws RecognitionException {
148 ConjunctiveQueryParser.cq_return retval = new ConjunctiveQueryParser.cq_return();
149 retval.start = input.LT(1);
150
151 Object root_0 = null;
152
153 ParserRuleReturnScope prefixlist1 =null;
154 ParserRuleReturnScope rulebody2 =null;
155
156 RewriteRuleSubtreeStream stream_rulebody=new RewriteRuleSubtreeStream(adaptor,"rule rulebody");
157 RewriteRuleSubtreeStream stream_prefixlist=new RewriteRuleSubtreeStream(adaptor,"rule prefixlist");
158
159 try {
160 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:97:4: ( prefixlist rulebody -> ^( EXPRESSION prefixlist rulebody ) )
161 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:98:3: prefixlist rulebody
162 {
163 pushFollow(FOLLOW_prefixlist_in_cq132);
164 prefixlist1=prefixlist();
165 state._fsp--;
166
167 stream_prefixlist.add(prefixlist1.getTree());
168 pushFollow(FOLLOW_rulebody_in_cq134);
169 rulebody2=rulebody();
170 state._fsp--;
171
172 stream_rulebody.add(rulebody2.getTree());
173 // AST REWRITE
174 // elements: rulebody, prefixlist
175 // token labels:
176 // rule labels: retval
177 // token list labels:
178 // rule list labels:
179 // wildcard labels:
180 retval.tree = root_0;
181 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
182
183 root_0 = (Object)adaptor.nil();
184 // 98:23: -> ^( EXPRESSION prefixlist rulebody )
185 {
186 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:98:26: ^( EXPRESSION prefixlist rulebody )
187 {
188 Object root_1 = (Object)adaptor.nil();
189 root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(EXPRESSION, "EXPRESSION"), root_1);
190 adaptor.addChild(root_1, stream_prefixlist.nextTree());
191 adaptor.addChild(root_1, stream_rulebody.nextTree());
192 adaptor.addChild(root_0, root_1);
193 }
194
195 }
196
197
198 retval.tree = root_0;
199
200 }
201
202 retval.stop = input.LT(-1);
203
204 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
205 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
206
207 }
208 catch (RecognitionException re) {
209 reportError(re);
210 recover(input,re);
211 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
212 }
213 finally {
214 // do for sure before leaving
215 }
216 return retval;
217 }
218 // $ANTLR end "cq"
219
220
221 public static class prefixlist_return extends ParserRuleReturnScope {
222 Object tree;
223 @Override
224 public Object getTree() { return tree; }
225 };
226
227
228 // $ANTLR start "prefixlist"
229 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:100:1: prefixlist : prefix ( ',' prefix )* -> ^( PREFIX_LIST ( prefix )* ) ;
230 public final ConjunctiveQueryParser.prefixlist_return prefixlist() throws RecognitionException {
231 ConjunctiveQueryParser.prefixlist_return retval = new ConjunctiveQueryParser.prefixlist_return();
232 retval.start = input.LT(1);
233
234 Object root_0 = null;
235
236 Token char_literal4=null;
237 ParserRuleReturnScope prefix3 =null;
238 ParserRuleReturnScope prefix5 =null;
239
240 Object char_literal4_tree=null;
241 RewriteRuleTokenStream stream_22=new RewriteRuleTokenStream(adaptor,"token 22");
242 RewriteRuleSubtreeStream stream_prefix=new RewriteRuleSubtreeStream(adaptor,"rule prefix");
243
244 try {
245 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:100:11: ( prefix ( ',' prefix )* -> ^( PREFIX_LIST ( prefix )* ) )
246 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:101:3: prefix ( ',' prefix )*
247 {
248 pushFollow(FOLLOW_prefix_in_prefixlist154);
249 prefix3=prefix();
250 state._fsp--;
251
252 stream_prefix.add(prefix3.getTree());
253 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:101:10: ( ',' prefix )*
254 loop1:
255 while (true) {
256 int alt1=2;
257 int LA1_0 = input.LA(1);
258 if ( (LA1_0==22) ) {
259 alt1=1;
260 }
261
262 switch (alt1) {
263 case 1 :
264 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:101:11: ',' prefix
265 {
266 char_literal4=(Token)match(input,22,FOLLOW_22_in_prefixlist157);
267 stream_22.add(char_literal4);
268
269 pushFollow(FOLLOW_prefix_in_prefixlist159);
270 prefix5=prefix();
271 state._fsp--;
272
273 stream_prefix.add(prefix5.getTree());
274 }
275 break;
276
277 default :
278 break loop1;
279 }
280 }
281
282 // AST REWRITE
283 // elements: prefix
284 // token labels:
285 // rule labels: retval
286 // token list labels:
287 // rule list labels:
288 // wildcard labels:
289 retval.tree = root_0;
290 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
291
292 root_0 = (Object)adaptor.nil();
293 // 101:24: -> ^( PREFIX_LIST ( prefix )* )
294 {
295 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:101:27: ^( PREFIX_LIST ( prefix )* )
296 {
297 Object root_1 = (Object)adaptor.nil();
298 root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(PREFIX_LIST, "PREFIX_LIST"), root_1);
299 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:101:41: ( prefix )*
300 while ( stream_prefix.hasNext() ) {
301 adaptor.addChild(root_1, stream_prefix.nextTree());
302 }
303 stream_prefix.reset();
304
305 adaptor.addChild(root_0, root_1);
306 }
307
308 }
309
310
311 retval.tree = root_0;
312
313 }
314
315 retval.stop = input.LT(-1);
316
317 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
318 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
319
320 }
321 catch (RecognitionException re) {
322 reportError(re);
323 recover(input,re);
324 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
325 }
326 finally {
327 // do for sure before leaving
328 }
329 return retval;
330 }
331 // $ANTLR end "prefixlist"
332
333
334 public static class prefix_return extends ParserRuleReturnScope {
335 Object tree;
336 @Override
337 public Object getTree() { return tree; }
338 };
339
340
341 // $ANTLR start "prefix"
342 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:103:1: prefix : 'prefix' id ':' '<' url '>' -> ^( PREFIX id url ) ;
343 public final ConjunctiveQueryParser.prefix_return prefix() throws RecognitionException {
344 ConjunctiveQueryParser.prefix_return retval = new ConjunctiveQueryParser.prefix_return();
345 retval.start = input.LT(1);
346
347 Object root_0 = null;
348
349 Token string_literal6=null;
350 Token char_literal8=null;
351 Token char_literal9=null;
352 Token char_literal11=null;
353 ParserRuleReturnScope id7 =null;
354 ParserRuleReturnScope url10 =null;
355
356 Object string_literal6_tree=null;
357 Object char_literal8_tree=null;
358 Object char_literal9_tree=null;
359 Object char_literal11_tree=null;
360 RewriteRuleTokenStream stream_24=new RewriteRuleTokenStream(adaptor,"token 24");
361 RewriteRuleTokenStream stream_25=new RewriteRuleTokenStream(adaptor,"token 25");
362 RewriteRuleTokenStream stream_27=new RewriteRuleTokenStream(adaptor,"token 27");
363 RewriteRuleTokenStream stream_29=new RewriteRuleTokenStream(adaptor,"token 29");
364 RewriteRuleSubtreeStream stream_id=new RewriteRuleSubtreeStream(adaptor,"rule id");
365 RewriteRuleSubtreeStream stream_url=new RewriteRuleSubtreeStream(adaptor,"rule url");
366
367 try {
368 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:103:7: ( 'prefix' id ':' '<' url '>' -> ^( PREFIX id url ) )
369 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:104:3: 'prefix' id ':' '<' url '>'
370 {
371 string_literal6=(Token)match(input,29,FOLLOW_29_in_prefix181);
372 stream_29.add(string_literal6);
373
374 pushFollow(FOLLOW_id_in_prefix183);
375 id7=id();
376 state._fsp--;
377
378 stream_id.add(id7.getTree());
379 char_literal8=(Token)match(input,24,FOLLOW_24_in_prefix185);
380 stream_24.add(char_literal8);
381
382 char_literal9=(Token)match(input,25,FOLLOW_25_in_prefix187);
383 stream_25.add(char_literal9);
384
385 pushFollow(FOLLOW_url_in_prefix189);
386 url10=url();
387 state._fsp--;
388
389 stream_url.add(url10.getTree());
390 char_literal11=(Token)match(input,27,FOLLOW_27_in_prefix191);
391 stream_27.add(char_literal11);
392
393 // AST REWRITE
394 // elements: id, url
395 // token labels:
396 // rule labels: retval
397 // token list labels:
398 // rule list labels:
399 // wildcard labels:
400 retval.tree = root_0;
401 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
402
403 root_0 = (Object)adaptor.nil();
404 // 104:31: -> ^( PREFIX id url )
405 {
406 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:104:34: ^( PREFIX id url )
407 {
408 Object root_1 = (Object)adaptor.nil();
409 root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(PREFIX, "PREFIX"), root_1);
410 adaptor.addChild(root_1, stream_id.nextTree());
411 adaptor.addChild(root_1, stream_url.nextTree());
412 adaptor.addChild(root_0, root_1);
413 }
414
415 }
416
417
418 retval.tree = root_0;
419
420 }
421
422 retval.stop = input.LT(-1);
423
424 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
425 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
426
427 }
428 catch (RecognitionException re) {
429 reportError(re);
430 recover(input,re);
431 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
432 }
433 finally {
434 // do for sure before leaving
435 }
436 return retval;
437 }
438 // $ANTLR end "prefix"
439
440
441 public static class rulebody_return extends ParserRuleReturnScope {
442 Object tree;
443 @Override
444 public Object getTree() { return tree; }
445 };
446
447
448 // $ANTLR start "rulebody"
449 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:107:1: rulebody : headatom ( '<-' | ':' ) body ( '.' )? -> ^( RULE headatom body ) ;
450 public final ConjunctiveQueryParser.rulebody_return rulebody() throws RecognitionException {
451 ConjunctiveQueryParser.rulebody_return retval = new ConjunctiveQueryParser.rulebody_return();
452 retval.start = input.LT(1);
453
454 Object root_0 = null;
455
456 Token string_literal13=null;
457 Token char_literal14=null;
458 Token char_literal16=null;
459 ParserRuleReturnScope headatom12 =null;
460 ParserRuleReturnScope body15 =null;
461
462 Object string_literal13_tree=null;
463 Object char_literal14_tree=null;
464 Object char_literal16_tree=null;
465 RewriteRuleTokenStream stream_23=new RewriteRuleTokenStream(adaptor,"token 23");
466 RewriteRuleTokenStream stream_24=new RewriteRuleTokenStream(adaptor,"token 24");
467 RewriteRuleTokenStream stream_26=new RewriteRuleTokenStream(adaptor,"token 26");
468 RewriteRuleSubtreeStream stream_headatom=new RewriteRuleSubtreeStream(adaptor,"rule headatom");
469 RewriteRuleSubtreeStream stream_body=new RewriteRuleSubtreeStream(adaptor,"rule body");
470
471 try {
472 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:107:9: ( headatom ( '<-' | ':' ) body ( '.' )? -> ^( RULE headatom body ) )
473 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:3: headatom ( '<-' | ':' ) body ( '.' )?
474 {
475 pushFollow(FOLLOW_headatom_in_rulebody213);
476 headatom12=headatom();
477 state._fsp--;
478
479 stream_headatom.add(headatom12.getTree());
480 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:12: ( '<-' | ':' )
481 int alt2=2;
482 int LA2_0 = input.LA(1);
483 if ( (LA2_0==26) ) {
484 alt2=1;
485 }
486 else if ( (LA2_0==24) ) {
487 alt2=2;
488 }
489
490 else {
491 NoViableAltException nvae =
492 new NoViableAltException("", 2, 0, input);
493 throw nvae;
494 }
495
496 switch (alt2) {
497 case 1 :
498 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:13: '<-'
499 {
500 string_literal13=(Token)match(input,26,FOLLOW_26_in_rulebody216);
501 stream_26.add(string_literal13);
502
503 }
504 break;
505 case 2 :
506 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:18: ':'
507 {
508 char_literal14=(Token)match(input,24,FOLLOW_24_in_rulebody218);
509 stream_24.add(char_literal14);
510
511 }
512 break;
513
514 }
515
516 pushFollow(FOLLOW_body_in_rulebody221);
517 body15=body();
518 state._fsp--;
519
520 stream_body.add(body15.getTree());
521 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:28: ( '.' )?
522 int alt3=2;
523 int LA3_0 = input.LA(1);
524 if ( (LA3_0==23) ) {
525 alt3=1;
526 }
527 switch (alt3) {
528 case 1 :
529 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:28: '.'
530 {
531 char_literal16=(Token)match(input,23,FOLLOW_23_in_rulebody223);
532 stream_23.add(char_literal16);
533
534 }
535 break;
536
537 }
538
539 // AST REWRITE
540 // elements: headatom, body
541 // token labels:
542 // rule labels: retval
543 // token list labels:
544 // rule list labels:
545 // wildcard labels:
546 retval.tree = root_0;
547 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
548
549 root_0 = (Object)adaptor.nil();
550 // 108:34: -> ^( RULE headatom body )
551 {
552 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:37: ^( RULE headatom body )
553 {
554 Object root_1 = (Object)adaptor.nil();
555 root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(RULE, "RULE"), root_1);
556 adaptor.addChild(root_1, stream_headatom.nextTree());
557 adaptor.addChild(root_1, stream_body.nextTree());
558 adaptor.addChild(root_0, root_1);
559 }
560
561 }
562
563
564 retval.tree = root_0;
565
566 }
567
568 retval.stop = input.LT(-1);
569
570 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
571 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
572
573 }
574 catch (RecognitionException re) {
575 reportError(re);
576 recover(input,re);
577 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
578 }
579 finally {
580 // do for sure before leaving
581 }
582 return retval;
583 }
584 // $ANTLR end "rulebody"
585
586
587 public static class body_return extends ParserRuleReturnScope {
588 Object tree;
589 @Override
590 public Object getTree() { return tree; }
591 };
592
593
594 // $ANTLR start "body"
595 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:110:1: body : atom ( ',' atom )* -> ^( ATOM_LIST ( atom )* ) ;
596 public final ConjunctiveQueryParser.body_return body() throws RecognitionException {
597 ConjunctiveQueryParser.body_return retval = new ConjunctiveQueryParser.body_return();
598 retval.start = input.LT(1);
599
600 Object root_0 = null;
601
602 Token char_literal18=null;
603 ParserRuleReturnScope atom17 =null;
604 ParserRuleReturnScope atom19 =null;
605
606 Object char_literal18_tree=null;
607 RewriteRuleTokenStream stream_22=new RewriteRuleTokenStream(adaptor,"token 22");
608 RewriteRuleSubtreeStream stream_atom=new RewriteRuleSubtreeStream(adaptor,"rule atom");
609
610 try {
611 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:110:5: ( atom ( ',' atom )* -> ^( ATOM_LIST ( atom )* ) )
612 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:111:3: atom ( ',' atom )*
613 {
614 pushFollow(FOLLOW_atom_in_body245);
615 atom17=atom();
616 state._fsp--;
617
618 stream_atom.add(atom17.getTree());
619 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:111:8: ( ',' atom )*
620 loop4:
621 while (true) {
622 int alt4=2;
623 int LA4_0 = input.LA(1);
624 if ( (LA4_0==22) ) {
625 alt4=1;
626 }
627
628 switch (alt4) {
629 case 1 :
630 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:111:9: ',' atom
631 {
632 char_literal18=(Token)match(input,22,FOLLOW_22_in_body248);
633 stream_22.add(char_literal18);
634
635 pushFollow(FOLLOW_atom_in_body250);
636 atom19=atom();
637 state._fsp--;
638
639 stream_atom.add(atom19.getTree());
640 }
641 break;
642
643 default :
644 break loop4;
645 }
646 }
647
648 // AST REWRITE
649 // elements: atom
650 // token labels:
651 // rule labels: retval
652 // token list labels:
653 // rule list labels:
654 // wildcard labels:
655 retval.tree = root_0;
656 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
657
658 root_0 = (Object)adaptor.nil();
659 // 111:20: -> ^( ATOM_LIST ( atom )* )
660 {
661 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:111:23: ^( ATOM_LIST ( atom )* )
662 {
663 Object root_1 = (Object)adaptor.nil();
664 root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(ATOM_LIST, "ATOM_LIST"), root_1);
665 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:111:35: ( atom )*
666 while ( stream_atom.hasNext() ) {
667 adaptor.addChild(root_1, stream_atom.nextTree());
668 }
669 stream_atom.reset();
670
671 adaptor.addChild(root_0, root_1);
672 }
673
674 }
675
676
677 retval.tree = root_0;
678
679 }
680
681 retval.stop = input.LT(-1);
682
683 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
684 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
685
686 }
687 catch (RecognitionException re) {
688 reportError(re);
689 recover(input,re);
690 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
691 }
692 finally {
693 // do for sure before leaving
694 }
695 return retval;
696 }
697 // $ANTLR end "body"
698
699
700 public static class headatom_return extends ParserRuleReturnScope {
701 Object tree;
702 @Override
703 public Object getTree() { return tree; }
704 };
705
706
707 // $ANTLR start "headatom"
708 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:114:1: headatom : id '(' term ( ',' term )* ')' -> ^( HEADATOM ( term )* ) ;
709 public final ConjunctiveQueryParser.headatom_return headatom() throws RecognitionException {
710 ConjunctiveQueryParser.headatom_return retval = new ConjunctiveQueryParser.headatom_return();
711 retval.start = input.LT(1);
712
713 Object root_0 = null;
714
715 Token char_literal21=null;
716 Token char_literal23=null;
717 Token char_literal25=null;
718 ParserRuleReturnScope id20 =null;
719 ParserRuleReturnScope term22 =null;
720 ParserRuleReturnScope term24 =null;
721
722 Object char_literal21_tree=null;
723 Object char_literal23_tree=null;
724 Object char_literal25_tree=null;
725 RewriteRuleTokenStream stream_21=new RewriteRuleTokenStream(adaptor,"token 21");
726 RewriteRuleTokenStream stream_20=new RewriteRuleTokenStream(adaptor,"token 20");
727 RewriteRuleTokenStream stream_22=new RewriteRuleTokenStream(adaptor,"token 22");
728 RewriteRuleSubtreeStream stream_id=new RewriteRuleSubtreeStream(adaptor,"rule id");
729 RewriteRuleSubtreeStream stream_term=new RewriteRuleSubtreeStream(adaptor,"rule term");
730
731 try {
732 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:114:9: ( id '(' term ( ',' term )* ')' -> ^( HEADATOM ( term )* ) )
733 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:115:3: id '(' term ( ',' term )* ')'
734 {
735 pushFollow(FOLLOW_id_in_headatom276);
736 id20=id();
737 state._fsp--;
738
739 stream_id.add(id20.getTree());
740 char_literal21=(Token)match(input,20,FOLLOW_20_in_headatom278);
741 stream_20.add(char_literal21);
742
743 pushFollow(FOLLOW_term_in_headatom280);
744 term22=term();
745 state._fsp--;
746
747 stream_term.add(term22.getTree());
748 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:115:15: ( ',' term )*
749 loop5:
750 while (true) {
751 int alt5=2;
752 int LA5_0 = input.LA(1);
753 if ( (LA5_0==22) ) {
754 alt5=1;
755 }
756
757 switch (alt5) {
758 case 1 :
759 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:115:16: ',' term
760 {
761 char_literal23=(Token)match(input,22,FOLLOW_22_in_headatom283);
762 stream_22.add(char_literal23);
763
764 pushFollow(FOLLOW_term_in_headatom285);
765 term24=term();
766 state._fsp--;
767
768 stream_term.add(term24.getTree());
769 }
770 break;
771
772 default :
773 break loop5;
774 }
775 }
776
777 char_literal25=(Token)match(input,21,FOLLOW_21_in_headatom289);
778 stream_21.add(char_literal25);
779
780 // AST REWRITE
781 // elements: term
782 // token labels:
783 // rule labels: retval
784 // token list labels:
785 // rule list labels:
786 // wildcard labels:
787 retval.tree = root_0;
788 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
789
790 root_0 = (Object)adaptor.nil();
791 // 115:31: -> ^( HEADATOM ( term )* )
792 {
793 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:115:34: ^( HEADATOM ( term )* )
794 {
795 Object root_1 = (Object)adaptor.nil();
796 root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(HEADATOM, "HEADATOM"), root_1);
797 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:115:45: ( term )*
798 while ( stream_term.hasNext() ) {
799 adaptor.addChild(root_1, stream_term.nextTree());
800 }
801 stream_term.reset();
802
803 adaptor.addChild(root_0, root_1);
804 }
805
806 }
807
808
809 retval.tree = root_0;
810
811 }
812
813 retval.stop = input.LT(-1);
814
815 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
816 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
817
818 }
819 catch (RecognitionException re) {
820 reportError(re);
821 recover(input,re);
822 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
823 }
824 finally {
825 // do for sure before leaving
826 }
827 return retval;
828 }
829 // $ANTLR end "headatom"
830
831
832 public static class atom_return extends ParserRuleReturnScope {
833 Object tree;
834 @Override
835 public Object getTree() { return tree; }
836 };
837
838
839 // $ANTLR start "atom"
840 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:117:1: atom : compositeid '(' term ( ',' term )* ')' -> ^( ATOM compositeid ( term )* ) ;
841 public final ConjunctiveQueryParser.atom_return atom() throws RecognitionException {
842 ConjunctiveQueryParser.atom_return retval = new ConjunctiveQueryParser.atom_return();
843 retval.start = input.LT(1);
844
845 Object root_0 = null;
846
847 Token char_literal27=null;
848 Token char_literal29=null;
849 Token char_literal31=null;
850 ParserRuleReturnScope compositeid26 =null;
851 ParserRuleReturnScope term28 =null;
852 ParserRuleReturnScope term30 =null;
853
854 Object char_literal27_tree=null;
855 Object char_literal29_tree=null;
856 Object char_literal31_tree=null;
857 RewriteRuleTokenStream stream_21=new RewriteRuleTokenStream(adaptor,"token 21");
858 RewriteRuleTokenStream stream_20=new RewriteRuleTokenStream(adaptor,"token 20");
859 RewriteRuleTokenStream stream_22=new RewriteRuleTokenStream(adaptor,"token 22");
860 RewriteRuleSubtreeStream stream_term=new RewriteRuleSubtreeStream(adaptor,"rule term");
861 RewriteRuleSubtreeStream stream_compositeid=new RewriteRuleSubtreeStream(adaptor,"rule compositeid");
862
863 try {
864 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:117:5: ( compositeid '(' term ( ',' term )* ')' -> ^( ATOM compositeid ( term )* ) )
865 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:118:3: compositeid '(' term ( ',' term )* ')'
866 {
867 pushFollow(FOLLOW_compositeid_in_atom309);
868 compositeid26=compositeid();
869 state._fsp--;
870
871 stream_compositeid.add(compositeid26.getTree());
872 char_literal27=(Token)match(input,20,FOLLOW_20_in_atom311);
873 stream_20.add(char_literal27);
874
875 pushFollow(FOLLOW_term_in_atom313);
876 term28=term();
877 state._fsp--;
878
879 stream_term.add(term28.getTree());
880 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:118:24: ( ',' term )*
881 loop6:
882 while (true) {
883 int alt6=2;
884 int LA6_0 = input.LA(1);
885 if ( (LA6_0==22) ) {
886 alt6=1;
887 }
888
889 switch (alt6) {
890 case 1 :
891 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:118:25: ',' term
892 {
893 char_literal29=(Token)match(input,22,FOLLOW_22_in_atom316);
894 stream_22.add(char_literal29);
895
896 pushFollow(FOLLOW_term_in_atom318);
897 term30=term();
898 state._fsp--;
899
900 stream_term.add(term30.getTree());
901 }
902 break;
903
904 default :
905 break loop6;
906 }
907 }
908
909 char_literal31=(Token)match(input,21,FOLLOW_21_in_atom322);
910 stream_21.add(char_literal31);
911
912 // AST REWRITE
913 // elements: term, compositeid
914 // token labels:
915 // rule labels: retval
916 // token list labels:
917 // rule list labels:
918 // wildcard labels:
919 retval.tree = root_0;
920 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
921
922 root_0 = (Object)adaptor.nil();
923 // 118:40: -> ^( ATOM compositeid ( term )* )
924 {
925 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:118:43: ^( ATOM compositeid ( term )* )
926 {
927 Object root_1 = (Object)adaptor.nil();
928 root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(ATOM, "ATOM"), root_1);
929 adaptor.addChild(root_1, stream_compositeid.nextTree());
930 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:118:62: ( term )*
931 while ( stream_term.hasNext() ) {
932 adaptor.addChild(root_1, stream_term.nextTree());
933 }
934 stream_term.reset();
935
936 adaptor.addChild(root_0, root_1);
937 }
938
939 }
940
941
942 retval.tree = root_0;
943
944 }
945
946 retval.stop = input.LT(-1);
947
948 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
949 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
950
951 }
952 catch (RecognitionException re) {
953 reportError(re);
954 recover(input,re);
955 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
956 }
957 finally {
958 // do for sure before leaving
959 }
960 return retval;
961 }
962 // $ANTLR end "atom"
963
964
965 public static class compositeid_return extends ParserRuleReturnScope {
966 Object tree;
967 @Override
968 public Object getTree() { return tree; }
969 };
970
971
972 // $ANTLR start "compositeid"
973 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:120:1: compositeid : ( id ) ':' ( id ) -> ^( ID id id ) ;
974 public final ConjunctiveQueryParser.compositeid_return compositeid() throws RecognitionException {
975 ConjunctiveQueryParser.compositeid_return retval = new ConjunctiveQueryParser.compositeid_return();
976 retval.start = input.LT(1);
977
978 Object root_0 = null;
979
980 Token char_literal33=null;
981 ParserRuleReturnScope id32 =null;
982 ParserRuleReturnScope id34 =null;
983
984 Object char_literal33_tree=null;
985 RewriteRuleTokenStream stream_24=new RewriteRuleTokenStream(adaptor,"token 24");
986 RewriteRuleSubtreeStream stream_id=new RewriteRuleSubtreeStream(adaptor,"rule id");
987
988 try {
989 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:120:12: ( ( id ) ':' ( id ) -> ^( ID id id ) )
990 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:2: ( id ) ':' ( id )
991 {
992 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:2: ( id )
993 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:3: id
994 {
995 pushFollow(FOLLOW_id_in_compositeid342);
996 id32=id();
997 state._fsp--;
998
999 stream_id.add(id32.getTree());
1000 }
1001
1002 char_literal33=(Token)match(input,24,FOLLOW_24_in_compositeid345);
1003 stream_24.add(char_literal33);
1004
1005 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:11: ( id )
1006 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:12: id
1007 {
1008 pushFollow(FOLLOW_id_in_compositeid348);
1009 id34=id();
1010 state._fsp--;
1011
1012 stream_id.add(id34.getTree());
1013 }
1014
1015 // AST REWRITE
1016 // elements: id, id
1017 // token labels:
1018 // rule labels: retval
1019 // token list labels:
1020 // rule list labels:
1021 // wildcard labels:
1022 retval.tree = root_0;
1023 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
1024
1025 root_0 = (Object)adaptor.nil();
1026 // 121:16: -> ^( ID id id )
1027 {
1028 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:19: ^( ID id id )
1029 {
1030 Object root_1 = (Object)adaptor.nil();
1031 root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(ID, "ID"), root_1);
1032 adaptor.addChild(root_1, stream_id.nextTree());
1033 adaptor.addChild(root_1, stream_id.nextTree());
1034 adaptor.addChild(root_0, root_1);
1035 }
1036
1037 }
1038
1039
1040 retval.tree = root_0;
1041
1042 }
1043
1044 retval.stop = input.LT(-1);
1045
1046 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
1047 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
1048
1049 }
1050 catch (RecognitionException re) {
1051 reportError(re);
1052 recover(input,re);
1053 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
1054 }
1055 finally {
1056 // do for sure before leaving
1057 }
1058 return retval;
1059 }
1060 // $ANTLR end "compositeid"
1061
1062
1063 public static class term_return extends ParserRuleReturnScope {
1064 Object tree;
1065 @Override
1066 public Object getTree() { return tree; }
1067 };
1068
1069
1070 // $ANTLR start "term"
1071 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:124:1: term : ( variable -> ^( VARIABLE variable ) | simpleid -> ^( SCONSTANT simpleid ) | compositeid -> ^( CONSTANT compositeid ) );
1072 public final ConjunctiveQueryParser.term_return term() throws RecognitionException {
1073 ConjunctiveQueryParser.term_return retval = new ConjunctiveQueryParser.term_return();
1074 retval.start = input.LT(1);
1075
1076 Object root_0 = null;
1077
1078 ParserRuleReturnScope variable35 =null;
1079 ParserRuleReturnScope simpleid36 =null;
1080 ParserRuleReturnScope compositeid37 =null;
1081
1082 RewriteRuleSubtreeStream stream_simpleid=new RewriteRuleSubtreeStream(adaptor,"rule simpleid");
1083 RewriteRuleSubtreeStream stream_compositeid=new RewriteRuleSubtreeStream(adaptor,"rule compositeid");
1084 RewriteRuleSubtreeStream stream_variable=new RewriteRuleSubtreeStream(adaptor,"rule variable");
1085
1086 try {
1087 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:124:5: ( variable -> ^( VARIABLE variable ) | simpleid -> ^( SCONSTANT simpleid ) | compositeid -> ^( CONSTANT compositeid ) )
1088 int alt7=3;
1089 switch ( input.LA(1) ) {
1090 case 28:
1091 {
1092 alt7=1;
1093 }
1094 break;
1095 case 25:
1096 {
1097 alt7=2;
1098 }
1099 break;
1100 case STRING:
1101 {
1102 alt7=3;
1103 }
1104 break;
1105 default:
1106 NoViableAltException nvae =
1107 new NoViableAltException("", 7, 0, input);
1108 throw nvae;
1109 }
1110 switch (alt7) {
1111 case 1 :
1112 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:125:3: variable
1113 {
1114 pushFollow(FOLLOW_variable_in_term371);
1115 variable35=variable();
1116 state._fsp--;
1117
1118 stream_variable.add(variable35.getTree());
1119 // AST REWRITE
1120 // elements: variable
1121 // token labels:
1122 // rule labels: retval
1123 // token list labels:
1124 // rule list labels:
1125 // wildcard labels:
1126 retval.tree = root_0;
1127 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
1128
1129 root_0 = (Object)adaptor.nil();
1130 // 125:12: -> ^( VARIABLE variable )
1131 {
1132 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:125:15: ^( VARIABLE variable )
1133 {
1134 Object root_1 = (Object)adaptor.nil();
1135 root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(VARIABLE, "VARIABLE"), root_1);
1136 adaptor.addChild(root_1, stream_variable.nextTree());
1137 adaptor.addChild(root_0, root_1);
1138 }
1139
1140 }
1141
1142
1143 retval.tree = root_0;
1144
1145 }
1146 break;
1147 case 2 :
1148 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:126:5: simpleid
1149 {
1150 pushFollow(FOLLOW_simpleid_in_term386);
1151 simpleid36=simpleid();
1152 state._fsp--;
1153
1154 stream_simpleid.add(simpleid36.getTree());
1155 // AST REWRITE
1156 // elements: simpleid
1157 // token labels:
1158 // rule labels: retval
1159 // token list labels:
1160 // rule list labels:
1161 // wildcard labels:
1162 retval.tree = root_0;
1163 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
1164
1165 root_0 = (Object)adaptor.nil();
1166 // 126:14: -> ^( SCONSTANT simpleid )
1167 {
1168 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:126:17: ^( SCONSTANT simpleid )
1169 {
1170 Object root_1 = (Object)adaptor.nil();
1171 root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(SCONSTANT, "SCONSTANT"), root_1);
1172 adaptor.addChild(root_1, stream_simpleid.nextTree());
1173 adaptor.addChild(root_0, root_1);
1174 }
1175
1176 }
1177
1178
1179 retval.tree = root_0;
1180
1181 }
1182 break;
1183 case 3 :
1184 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:127:5: compositeid
1185 {
1186 pushFollow(FOLLOW_compositeid_in_term400);
1187 compositeid37=compositeid();
1188 state._fsp--;
1189
1190 stream_compositeid.add(compositeid37.getTree());
1191 // AST REWRITE
1192 // elements: compositeid
1193 // token labels:
1194 // rule labels: retval
1195 // token list labels:
1196 // rule list labels:
1197 // wildcard labels:
1198 retval.tree = root_0;
1199 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
1200
1201 root_0 = (Object)adaptor.nil();
1202 // 127:17: -> ^( CONSTANT compositeid )
1203 {
1204 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:127:20: ^( CONSTANT compositeid )
1205 {
1206 Object root_1 = (Object)adaptor.nil();
1207 root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(CONSTANT, "CONSTANT"), root_1);
1208 adaptor.addChild(root_1, stream_compositeid.nextTree());
1209 adaptor.addChild(root_0, root_1);
1210 }
1211
1212 }
1213
1214
1215 retval.tree = root_0;
1216
1217 }
1218 break;
1219
1220 }
1221 retval.stop = input.LT(-1);
1222
1223 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
1224 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
1225
1226 }
1227 catch (RecognitionException re) {
1228 reportError(re);
1229 recover(input,re);
1230 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
1231 }
1232 finally {
1233 // do for sure before leaving
1234 }
1235 return retval;
1236 }
1237 // $ANTLR end "term"
1238
1239
1240 public static class id_return extends ParserRuleReturnScope {
1241 Object tree;
1242 @Override
1243 public Object getTree() { return tree; }
1244 };
1245
1246
1247 // $ANTLR start "id"
1248 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:129:1: id : ( STRING ) ;
1249 public final ConjunctiveQueryParser.id_return id() throws RecognitionException {
1250 ConjunctiveQueryParser.id_return retval = new ConjunctiveQueryParser.id_return();
1251 retval.start = input.LT(1);
1252
1253 Object root_0 = null;
1254
1255 Token STRING38=null;
1256
1257 Object STRING38_tree=null;
1258
1259 try {
1260 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:129:4: ( ( STRING ) )
1261 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:129:6: ( STRING )
1262 {
1263 root_0 = (Object)adaptor.nil();
1264
1265
1266 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:129:6: ( STRING )
1267 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:129:7: STRING
1268 {
1269 STRING38=(Token)match(input,STRING,FOLLOW_STRING_in_id417);
1270 STRING38_tree = (Object)adaptor.create(STRING38);
1271 adaptor.addChild(root_0, STRING38_tree);
1272
1273 }
1274
1275 }
1276
1277 retval.stop = input.LT(-1);
1278
1279 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
1280 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
1281
1282 }
1283 catch (RecognitionException re) {
1284 reportError(re);
1285 recover(input,re);
1286 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
1287 }
1288 finally {
1289 // do for sure before leaving
1290 }
1291 return retval;
1292 }
1293 // $ANTLR end "id"
1294
1295
1296 public static class simpleid_return extends ParserRuleReturnScope {
1297 Object tree;
1298 @Override
1299 public Object getTree() { return tree; }
1300 };
1301
1302
1303 // $ANTLR start "simpleid"
1304 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:130:1: simpleid : ( '<' URLSTRING '>' | '<' STRING '>' );
1305 public final ConjunctiveQueryParser.simpleid_return simpleid() throws RecognitionException {
1306 ConjunctiveQueryParser.simpleid_return retval = new ConjunctiveQueryParser.simpleid_return();
1307 retval.start = input.LT(1);
1308
1309 Object root_0 = null;
1310
1311 Token char_literal39=null;
1312 Token URLSTRING40=null;
1313 Token char_literal41=null;
1314 Token char_literal42=null;
1315 Token STRING43=null;
1316 Token char_literal44=null;
1317
1318 Object char_literal39_tree=null;
1319 Object URLSTRING40_tree=null;
1320 Object char_literal41_tree=null;
1321 Object char_literal42_tree=null;
1322 Object STRING43_tree=null;
1323 Object char_literal44_tree=null;
1324
1325 try {
1326 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:130:10: ( '<' URLSTRING '>' | '<' STRING '>' )
1327 int alt8=2;
1328 int LA8_0 = input.LA(1);
1329 if ( (LA8_0==25) ) {
1330 int LA8_1 = input.LA(2);
1331 if ( (LA8_1==URLSTRING) ) {
1332 alt8=1;
1333 }
1334 else if ( (LA8_1==STRING) ) {
1335 alt8=2;
1336 }
1337
1338 else {
1339 int nvaeMark = input.mark();
1340 try {
1341 input.consume();
1342 NoViableAltException nvae =
1343 new NoViableAltException("", 8, 1, input);
1344 throw nvae;
1345 } finally {
1346 input.rewind(nvaeMark);
1347 }
1348 }
1349
1350 }
1351
1352 else {
1353 NoViableAltException nvae =
1354 new NoViableAltException("", 8, 0, input);
1355 throw nvae;
1356 }
1357
1358 switch (alt8) {
1359 case 1 :
1360 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:130:12: '<' URLSTRING '>'
1361 {
1362 root_0 = (Object)adaptor.nil();
1363
1364
1365 char_literal39=(Token)match(input,25,FOLLOW_25_in_simpleid425);
1366 char_literal39_tree = (Object)adaptor.create(char_literal39);
1367 adaptor.addChild(root_0, char_literal39_tree);
1368
1369 URLSTRING40=(Token)match(input,URLSTRING,FOLLOW_URLSTRING_in_simpleid427);
1370 URLSTRING40_tree = (Object)adaptor.create(URLSTRING40);
1371 adaptor.addChild(root_0, URLSTRING40_tree);
1372
1373 char_literal41=(Token)match(input,27,FOLLOW_27_in_simpleid429);
1374 char_literal41_tree = (Object)adaptor.create(char_literal41);
1375 adaptor.addChild(root_0, char_literal41_tree);
1376
1377 }
1378 break;
1379 case 2 :
1380 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:130:32: '<' STRING '>'
1381 {
1382 root_0 = (Object)adaptor.nil();
1383
1384
1385 char_literal42=(Token)match(input,25,FOLLOW_25_in_simpleid433);
1386 char_literal42_tree = (Object)adaptor.create(char_literal42);
1387 adaptor.addChild(root_0, char_literal42_tree);
1388
1389 STRING43=(Token)match(input,STRING,FOLLOW_STRING_in_simpleid435);
1390 STRING43_tree = (Object)adaptor.create(STRING43);
1391 adaptor.addChild(root_0, STRING43_tree);
1392
1393 char_literal44=(Token)match(input,27,FOLLOW_27_in_simpleid437);
1394 char_literal44_tree = (Object)adaptor.create(char_literal44);
1395 adaptor.addChild(root_0, char_literal44_tree);
1396
1397 }
1398 break;
1399
1400 }
1401 retval.stop = input.LT(-1);
1402
1403 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
1404 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
1405
1406 }
1407 catch (RecognitionException re) {
1408 reportError(re);
1409 recover(input,re);
1410 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
1411 }
1412 finally {
1413 // do for sure before leaving
1414 }
1415 return retval;
1416 }
1417 // $ANTLR end "simpleid"
1418
1419
1420 public static class variable_return extends ParserRuleReturnScope {
1421 Object tree;
1422 @Override
1423 public Object getTree() { return tree; }
1424 };
1425
1426
1427 // $ANTLR start "variable"
1428 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:133:1: variable : ( '?' ) id -> ^( id ) ;
1429 public final ConjunctiveQueryParser.variable_return variable() throws RecognitionException {
1430 ConjunctiveQueryParser.variable_return retval = new ConjunctiveQueryParser.variable_return();
1431 retval.start = input.LT(1);
1432
1433 Object root_0 = null;
1434
1435 Token char_literal45=null;
1436 ParserRuleReturnScope id46 =null;
1437
1438 Object char_literal45_tree=null;
1439 RewriteRuleTokenStream stream_28=new RewriteRuleTokenStream(adaptor,"token 28");
1440 RewriteRuleSubtreeStream stream_id=new RewriteRuleSubtreeStream(adaptor,"rule id");
1441
1442 try {
1443 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:133:9: ( ( '?' ) id -> ^( id ) )
1444 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:134:3: ( '?' ) id
1445 {
1446 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:134:3: ( '?' )
1447 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:134:4: '?'
1448 {
1449 char_literal45=(Token)match(input,28,FOLLOW_28_in_variable448);
1450 stream_28.add(char_literal45);
1451
1452 }
1453
1454 pushFollow(FOLLOW_id_in_variable451);
1455 id46=id();
1456 state._fsp--;
1457
1458 stream_id.add(id46.getTree());
1459 // AST REWRITE
1460 // elements: id
1461 // token labels:
1462 // rule labels: retval
1463 // token list labels:
1464 // rule list labels:
1465 // wildcard labels:
1466 retval.tree = root_0;
1467 RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);
1468
1469 root_0 = (Object)adaptor.nil();
1470 // 134:12: -> ^( id )
1471 {
1472 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:134:15: ^( id )
1473 {
1474 Object root_1 = (Object)adaptor.nil();
1475 root_1 = (Object)adaptor.becomeRoot(stream_id.nextNode(), root_1);
1476 adaptor.addChild(root_0, root_1);
1477 }
1478
1479 }
1480
1481
1482 retval.tree = root_0;
1483
1484 }
1485
1486 retval.stop = input.LT(-1);
1487
1488 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
1489 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
1490
1491 }
1492 catch (RecognitionException re) {
1493 reportError(re);
1494 recover(input,re);
1495 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
1496 }
1497 finally {
1498 // do for sure before leaving
1499 }
1500 return retval;
1501 }
1502 // $ANTLR end "variable"
1503
1504
1505 public static class url_return extends ParserRuleReturnScope {
1506 Object tree;
1507 @Override
1508 public Object getTree() { return tree; }
1509 };
1510
1511
1512 // $ANTLR start "url"
1513 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:136:2: url : ( URLSTRING ) ;
1514 public final ConjunctiveQueryParser.url_return url() throws RecognitionException {
1515 ConjunctiveQueryParser.url_return retval = new ConjunctiveQueryParser.url_return();
1516 retval.start = input.LT(1);
1517
1518 Object root_0 = null;
1519
1520 Token URLSTRING47=null;
1521
1522 Object URLSTRING47_tree=null;
1523
1524 try {
1525 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:136:6: ( ( URLSTRING ) )
1526 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:136:8: ( URLSTRING )
1527 {
1528 root_0 = (Object)adaptor.nil();
1529
1530
1531 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:136:8: ( URLSTRING )
1532 // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:136:9: URLSTRING
1533 {
1534 URLSTRING47=(Token)match(input,URLSTRING,FOLLOW_URLSTRING_in_url469);
1535 URLSTRING47_tree = (Object)adaptor.create(URLSTRING47);
1536 adaptor.addChild(root_0, URLSTRING47_tree);
1537
1538 }
1539
1540 }
1541
1542 retval.stop = input.LT(-1);
1543
1544 retval.tree = (Object)adaptor.rulePostProcessing(root_0);
1545 adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
1546
1547 }
1548 catch (RecognitionException re) {
1549 reportError(re);
1550 recover(input,re);
1551 retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
1552 }
1553 finally {
1554 // do for sure before leaving
1555 }
1556 return retval;
1557 }
1558 // $ANTLR end "url"
1559
1560 // Delegated rules
1561
1562
1563
1564 public static final BitSet FOLLOW_prefixlist_in_cq132 = new BitSet(new long[]{0x0000000000008000L});
1565 public static final BitSet FOLLOW_rulebody_in_cq134 = new BitSet(new long[]{0x0000000000000002L});
1566 public static final BitSet FOLLOW_prefix_in_prefixlist154 = new BitSet(new long[]{0x0000000000400002L});
1567 public static final BitSet FOLLOW_22_in_prefixlist157 = new BitSet(new long[]{0x0000000020000000L});
1568 public static final BitSet FOLLOW_prefix_in_prefixlist159 = new BitSet(new long[]{0x0000000000400002L});
1569 public static final BitSet FOLLOW_29_in_prefix181 = new BitSet(new long[]{0x0000000000008000L});
1570 public static final BitSet FOLLOW_id_in_prefix183 = new BitSet(new long[]{0x0000000001000000L});
1571 public static final BitSet FOLLOW_24_in_prefix185 = new BitSet(new long[]{0x0000000002000000L});
1572 public static final BitSet FOLLOW_25_in_prefix187 = new BitSet(new long[]{0x0000000000020000L});
1573 public static final BitSet FOLLOW_url_in_prefix189 = new BitSet(new long[]{0x0000000008000000L});
1574 public static final BitSet FOLLOW_27_in_prefix191 = new BitSet(new long[]{0x0000000000000002L});
1575 public static final BitSet FOLLOW_headatom_in_rulebody213 = new BitSet(new long[]{0x0000000005000000L});
1576 public static final BitSet FOLLOW_26_in_rulebody216 = new BitSet(new long[]{0x0000000000008000L});
1577 public static final BitSet FOLLOW_24_in_rulebody218 = new BitSet(new long[]{0x0000000000008000L});
1578 public static final BitSet FOLLOW_body_in_rulebody221 = new BitSet(new long[]{0x0000000000800002L});
1579 public static final BitSet FOLLOW_23_in_rulebody223 = new BitSet(new long[]{0x0000000000000002L});
1580 public static final BitSet FOLLOW_atom_in_body245 = new BitSet(new long[]{0x0000000000400002L});
1581 public static final BitSet FOLLOW_22_in_body248 = new BitSet(new long[]{0x0000000000008000L});
1582 public static final BitSet FOLLOW_atom_in_body250 = new BitSet(new long[]{0x0000000000400002L});
1583 public static final BitSet FOLLOW_id_in_headatom276 = new BitSet(new long[]{0x0000000000100000L});
1584 public static final BitSet FOLLOW_20_in_headatom278 = new BitSet(new long[]{0x0000000012008000L});
1585 public static final BitSet FOLLOW_term_in_headatom280 = new BitSet(new long[]{0x0000000000600000L});
1586 public static final BitSet FOLLOW_22_in_headatom283 = new BitSet(new long[]{0x0000000012008000L});
1587 public static final BitSet FOLLOW_term_in_headatom285 = new BitSet(new long[]{0x0000000000600000L});
1588 public static final BitSet FOLLOW_21_in_headatom289 = new BitSet(new long[]{0x0000000000000002L});
1589 public static final BitSet FOLLOW_compositeid_in_atom309 = new BitSet(new long[]{0x0000000000100000L});
1590 public static final BitSet FOLLOW_20_in_atom311 = new BitSet(new long[]{0x0000000012008000L});
1591 public static final BitSet FOLLOW_term_in_atom313 = new BitSet(new long[]{0x0000000000600000L});
1592 public static final BitSet FOLLOW_22_in_atom316 = new BitSet(new long[]{0x0000000012008000L});
1593 public static final BitSet FOLLOW_term_in_atom318 = new BitSet(new long[]{0x0000000000600000L});
1594 public static final BitSet FOLLOW_21_in_atom322 = new BitSet(new long[]{0x0000000000000002L});
1595 public static final BitSet FOLLOW_id_in_compositeid342 = new BitSet(new long[]{0x0000000001000000L});
1596 public static final BitSet FOLLOW_24_in_compositeid345 = new BitSet(new long[]{0x0000000000008000L});
1597 public static final BitSet FOLLOW_id_in_compositeid348 = new BitSet(new long[]{0x0000000000000002L});
1598 public static final BitSet FOLLOW_variable_in_term371 = new BitSet(new long[]{0x0000000000000002L});
1599 public static final BitSet FOLLOW_simpleid_in_term386 = new BitSet(new long[]{0x0000000000000002L});
1600 public static final BitSet FOLLOW_compositeid_in_term400 = new BitSet(new long[]{0x0000000000000002L});
1601 public static final BitSet FOLLOW_STRING_in_id417 = new BitSet(new long[]{0x0000000000000002L});
1602 public static final BitSet FOLLOW_25_in_simpleid425 = new BitSet(new long[]{0x0000000000020000L});
1603 public static final BitSet FOLLOW_URLSTRING_in_simpleid427 = new BitSet(new long[]{0x0000000008000000L});
1604 public static final BitSet FOLLOW_27_in_simpleid429 = new BitSet(new long[]{0x0000000000000002L});
1605 public static final BitSet FOLLOW_25_in_simpleid433 = new BitSet(new long[]{0x0000000000008000L});
1606 public static final BitSet FOLLOW_STRING_in_simpleid435 = new BitSet(new long[]{0x0000000008000000L});
1607 public static final BitSet FOLLOW_27_in_simpleid437 = new BitSet(new long[]{0x0000000000000002L});
1608 public static final BitSet FOLLOW_28_in_variable448 = new BitSet(new long[]{0x0000000000008000L});
1609 public static final BitSet FOLLOW_id_in_variable451 = new BitSet(new long[]{0x0000000000000002L});
1610 public static final BitSet FOLLOW_URLSTRING_in_url469 = new BitSet(new long[]{0x0000000000000002L});
1611}
diff --git a/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryWalker.java b/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryWalker.java
new file mode 100644
index 0000000..f95ac12
--- /dev/null
+++ b/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryWalker.java
@@ -0,0 +1,179 @@
1package org.semanticweb.karma2.model.cqparser;
2
3
4
5import java.util.ArrayList;
6import java.util.Iterator;
7import java.util.List;
8
9import org.antlr.runtime.tree.CommonTree;
10import org.semanticweb.HermiT.model.Atom;
11import org.semanticweb.HermiT.model.AtomicConcept;
12import org.semanticweb.HermiT.model.AtomicRole;
13import org.semanticweb.HermiT.model.Individual;
14import org.semanticweb.HermiT.model.Term;
15import org.semanticweb.HermiT.model.Variable;
16import org.semanticweb.karma2.exception.IllegalInputQueryException;
17import org.semanticweb.karma2.model.ConjunctiveQuery;
18
19import uk.ac.ox.cs.JRDFox.Prefixes;
20import uk.ac.ox.cs.pagoda.util.Utility;
21
22
23
24
25public class ConjunctiveQueryWalker {
26
27
28 public ConjunctiveQueryWalker() {
29
30 }
31
32 @SuppressWarnings("unchecked")
33 private List<CommonTree> childrenOf(CommonTree node) {
34 return (List<CommonTree>) node.getChildren();
35 }
36
37
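	/*
	 * Safety check: every variable in the query head must also occur in at
	 * least one body atom (of arity 1 or 2); a head variable with no binding
	 * in the body makes the conjunctive query unsafe.
	 */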
38 private boolean isSafe(Term[] headTerms, Atom[] atoms) {
39 for (Term t : headTerms) {
40 if (t instanceof Variable) {
41 boolean res = false;
42 for (Atom a : atoms) {
43 if (a.getArity()==1) {
44 if (a.getArgument(0).equals(t))
45 res = true;
46 }
47 if (a.getArity()==2) {
48 if (a.getArgument(0).equals(t) || a.getArgument(1).equals(t))
49 res = true;
50 }
51 }
52 if(!res)
53 return false;
54 }
55 }
56 return true;
57 }
58
59
60 public ConjunctiveQuery walkExpressionNode(CommonTree ruleNode) throws IllegalInputQueryException {
61
62 assert (ruleNode.getType() == ConjunctiveQueryLexer.EXPRESSION);
63
64 Iterator<CommonTree> iterator = childrenOf(ruleNode).iterator();
65
66 CommonTree prefixList = iterator.next();
67 assert (prefixList.getType() == ConjunctiveQueryLexer.PREFIX_LIST);
68 Prefixes prefixes = walkPrefixList(prefixList);
69 CommonTree rulebody = iterator.next();
70		assert (rulebody.getType() == ConjunctiveQueryLexer.RULE);
71 return walkRuleNode(rulebody, prefixes);
72
73 }
74
75 public Prefixes walkPrefixList(CommonTree prefixlist) throws IllegalInputQueryException {
76 assert (prefixlist.getType() == ConjunctiveQueryLexer.PREFIX_LIST);
77 Prefixes pref = new Prefixes();
78 for (CommonTree prefixNode : childrenOf(prefixlist)) {
79 walkPrefixNode(prefixNode, pref);
80 }
81 return pref;
82
83 }
84
85 private void walkPrefixNode(CommonTree prefixNode, Prefixes pref) throws IllegalInputQueryException {
86 Iterator<CommonTree> iterator = childrenOf(prefixNode).iterator();
87 CommonTree shortID = iterator.next();
88 CommonTree longID = iterator.next();
89 pref.declarePrefix(shortID.getText() + ":", longID.getText());
90 }
91
92
93 public ConjunctiveQuery walkRuleNode(CommonTree ruleNode, Prefixes prefixes) throws IllegalInputQueryException {
94
95 assert (ruleNode.getType() == ConjunctiveQueryLexer.RULE);
96
97 Iterator<CommonTree> iterator = childrenOf(ruleNode).iterator();
98
99 CommonTree headNode = iterator.next();
100 assert (headNode.getType() == ConjunctiveQueryLexer.HEADATOM);
101 Term[] headTerms = walkHeadAtomNode(headNode);
102 Atom[] atoms = walkAtomList(iterator.next());
103 if (!isSafe(headTerms, atoms))
104			throw new IllegalInputQueryException("query is not safe: every answer variable must occur in the query body");
105 return new ConjunctiveQuery(atoms, headTerms, prefixes);
106
107 }
108
109 private Term[] walkHeadAtomNode(CommonTree node) throws IllegalInputQueryException {
110 List<Term> terms = new ArrayList<Term>();
111 for (CommonTree termNode : childrenOf(node)) {
112 terms.add(walkTermNode(termNode));
113 }
114 return terms.toArray(new Term[terms.size()]);
115 }
116
117
118 private String walkCompositeId(CommonTree compositeID) {
119 Iterator<CommonTree> iterator = childrenOf(compositeID).iterator();
120 return iterator.next().getText() + ":" + iterator.next().getText() ;
121 }
122
123 private String walkSimpleId(CommonTree termNode) {
124 Iterator<CommonTree> it = childrenOf(termNode).iterator();
125 it.next();
126 CommonTree t = it.next();
127 return t.getText();
128 }
129
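	// A term node is either a VARIABLE (turned into a HermiT Variable named
	// "?id"), a CONSTANT (a prefixed name built from a composite id), or an
	// SCONSTANT (an IRI written between angle brackets); both constant forms
	// become HermiT Individuals.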
130 private Term walkTermNode(CommonTree termNode) throws IllegalInputQueryException {
131 if (termNode.getType() == ConjunctiveQueryLexer.VARIABLE) {
132 return Variable.create("?" + childrenOf(termNode).iterator().next().getText());
133 }
134 if (termNode.getType() == ConjunctiveQueryLexer.CONSTANT) {
135 Individual newind = Individual.create(walkCompositeId(childrenOf(termNode).iterator().next()));
136 Utility.logError(newind);
137 return newind;
138 }
139 if (termNode.getType() == ConjunctiveQueryLexer.SCONSTANT) {
140 Individual newind = Individual.create(walkSimpleId(termNode));
141 return newind;
142 }
143 throw new IllegalArgumentException();
144 }
145
146
147
148 public Atom[] walkAtomList(CommonTree node) throws IllegalInputQueryException {
149 assert (node.getType() == ConjunctiveQueryLexer.ATOM_LIST);
150 List<Atom> atoms = new ArrayList<Atom>();
151 for (CommonTree atomNode : childrenOf(node)) {
152 atoms.add(walkAtomNode(atomNode));
153 }
154 return atoms.toArray(new Atom[atoms.size()]);
155
156 }
157
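	// An atom is built from its predicate name and term list: unary atoms
	// become AtomicConcept atoms, binary atoms become AtomicRole atoms, and
	// any other arity is rejected.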
158 private Atom walkAtomNode(CommonTree atomNode) throws IllegalInputQueryException {
159 assert (atomNode.getType() == ConjunctiveQueryLexer.ATOM);
160 Iterator<CommonTree> iterator = childrenOf(atomNode).iterator();
161 CommonTree id = iterator.next();
162 String predicatename = walkCompositeId(id);
163 List<Term> listofterms = new ArrayList<Term>();
164 while (iterator.hasNext()){
165 listofterms.add(walkTermNode(iterator.next()));
166 }
167 if(listofterms.isEmpty() || (listofterms.size()>2))
168			throw new IllegalInputQueryException("Atoms must have exactly one or two terms");
169 Term[] terms = listofterms.toArray(new Term[listofterms.size()]);
170 if (terms.length == 1)
171 return Atom.create(AtomicConcept.create(predicatename), terms);
172 if (terms.length == 2)
173 return Atom.create(AtomicRole.create(predicatename), terms);
174 throw new IllegalInputQueryException("Problem parsing terms in the query");
175 }
176
177
178}
179
diff --git a/src/org/semanticweb/karma2/profile/ELHOProfile.java b/src/org/semanticweb/karma2/profile/ELHOProfile.java
new file mode 100644
index 0000000..fb68b05
--- /dev/null
+++ b/src/org/semanticweb/karma2/profile/ELHOProfile.java
@@ -0,0 +1,281 @@
1package org.semanticweb.karma2.profile;
2
3import java.util.HashSet;
4import java.util.Iterator;
5import java.util.Set;
6
7import org.semanticweb.owlapi.model.AxiomType;
8import org.semanticweb.owlapi.model.IRI;
9import org.semanticweb.owlapi.model.OWLAxiom;
10import org.semanticweb.owlapi.model.OWLClassExpression;
11import org.semanticweb.owlapi.model.OWLDataHasValue;
12import org.semanticweb.owlapi.model.OWLDataIntersectionOf;
13import org.semanticweb.owlapi.model.OWLDataOneOf;
14import org.semanticweb.owlapi.model.OWLDataProperty;
15import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom;
16import org.semanticweb.owlapi.model.OWLDataPropertyDomainAxiom;
17import org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom;
18import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom;
19import org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom;
20import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom;
21import org.semanticweb.owlapi.model.OWLHasKeyAxiom;
22import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom;
23import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom;
24import org.semanticweb.owlapi.model.OWLObjectHasSelf;
25import org.semanticweb.owlapi.model.OWLObjectMinCardinality;
26import org.semanticweb.owlapi.model.OWLObjectOneOf;
27import org.semanticweb.owlapi.model.OWLOntology;
28import org.semanticweb.owlapi.model.OWLOntologyCreationException;
29import org.semanticweb.owlapi.model.OWLOntologyManager;
30import org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom;
31import org.semanticweb.owlapi.model.OWLSubDataPropertyOfAxiom;
32import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom;
33import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom;
34import org.semanticweb.owlapi.profiles.OWL2ELProfile;
35import org.semanticweb.owlapi.profiles.OWLProfile;
36import org.semanticweb.owlapi.profiles.OWLProfileReport;
37import org.semanticweb.owlapi.profiles.OWLProfileViolation;
38import org.semanticweb.owlapi.profiles.UseOfDataOneOfWithMultipleLiterals;
39import org.semanticweb.owlapi.profiles.UseOfIllegalAxiom;
40import org.semanticweb.owlapi.profiles.UseOfIllegalClassExpression;
41import org.semanticweb.owlapi.profiles.UseOfObjectOneOfWithMultipleIndividuals;
42import org.semanticweb.owlapi.util.OWLObjectPropertyManager;
43import org.semanticweb.owlapi.util.OWLOntologyWalker;
44import org.semanticweb.owlapi.util.OWLOntologyWalkerVisitor;
45import uk.ac.ox.cs.pagoda.util.Utility;
46
47public class ELHOProfile implements OWLProfile {
48
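	/**
	 * Copies the import closure of the given ontology into a fresh ontology
	 * and then removes every axiom reported as a profile violation, so that
	 * the result is the syntactic ELHO fragment of the input.
	 */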
49 public OWLOntology getFragment(OWLOntology ontology) {
50 OWLOntologyManager manager = ontology.getOWLOntologyManager();
51 OWLOntology elhoOntology = null;
52 try {
53 Utility.logDebug("OntologyID: " + ontology.getOntologyID());
54 try {
55 String ontologyIRI = ontology.getOntologyID().getOntologyIRI().toString();
56 if (ontologyIRI.contains(".owl"))
57 ontologyIRI = ontologyIRI.replace(".owl", "-elho.owl");
58 else
59 ontologyIRI = ontologyIRI + "elho";
60 elhoOntology = manager.createOntology(IRI.create(ontologyIRI));
61			} catch (NullPointerException e) { // no ontology IRI (anonymous ontology): fall back to a fresh anonymous ontology
62// e.printStackTrace();
63 elhoOntology = manager.createOntology();
64 }
65
66 } catch (OWLOntologyCreationException e) {
67 e.printStackTrace();
68 }
69 for (OWLOntology onto: ontology.getImportsClosure())
70 manager.addAxioms(elhoOntology, onto.getAxioms());
71
72 // TODO to be checked ...
73 manager.removeAxioms(elhoOntology, elhoOntology.getAxioms(AxiomType.DIFFERENT_INDIVIDUALS));
74
75 OWLProfileReport report = checkOntology(elhoOntology);
76
77 for (OWLProfileViolation violation: report.getViolations()) {
78 OWLAxiom axiom = violation.getAxiom();
79 manager.removeAxiom(elhoOntology, axiom);
80 }
81 Utility.logDebug("ELHO fragment extracted ... ");
82
83 return elhoOntology;
84 }
85
86 @Override
87 public OWLProfileReport checkOntology(OWLOntology ontology) {
88 OWL2ELProfile profile = new OWL2ELProfile();
89 OWLProfileReport report = profile.checkOntology(ontology);
90 Set<OWLProfileViolation> violations = new HashSet<OWLProfileViolation>();
91 violations.addAll(report.getViolations());
92 MyOWLOntologyWalker ontologyWalker = new MyOWLOntologyWalker(ontology.getImportsClosure());
93 ELHOProfileObjectVisitor visitor = new ELHOProfileObjectVisitor(ontologyWalker, ontology.getOWLOntologyManager());
94 ontologyWalker.walkStructure(visitor);
95
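		// ObjectMinCardinality with cardinality 1 is equivalent to
		// ObjectSomeValuesFrom, which is allowed in EL, so such violations
		// reported by OWL2ELProfile are discarded here.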
96 for (Iterator<OWLProfileViolation> iter = violations.iterator(); iter.hasNext(); ) {
97 OWLProfileViolation vio = iter.next();
98 if (vio instanceof UseOfIllegalClassExpression) {
99 OWLClassExpression exp = ((UseOfIllegalClassExpression) vio).getOWLClassExpression();
100 if (exp instanceof OWLObjectMinCardinality && ((OWLObjectMinCardinality) exp).getCardinality() == 1)
101 iter.remove();
102 }
103 }
104
105 violations.addAll(visitor.getProfileViolations());
106 return new OWLProfileReport(this, violations);
107 }
108
109 @Override
110 public String getName() {
111 return "ELHO";
112 }
113
114 protected class ELHOProfileObjectVisitor extends OWLOntologyWalkerVisitor<Object> {
115
116 private final OWLOntologyManager man;
117
118 private OWLObjectPropertyManager propertyManager;
119
120 private final Set<OWLProfileViolation> profileViolations = new HashSet<OWLProfileViolation>();
121
122 public ELHOProfileObjectVisitor(OWLOntologyWalker walker, OWLOntologyManager man) {
123 super(walker);
124 this.man = man;
125 }
126
127 public Set<OWLProfileViolation> getProfileViolations() {
128 return new HashSet<OWLProfileViolation>(profileViolations);
129 }
130
131 @SuppressWarnings("unused")
132 private OWLObjectPropertyManager getPropertyManager() {
133 if (propertyManager == null) {
134 propertyManager = new OWLObjectPropertyManager(man, getCurrentOntology());
135 }
136 return propertyManager;
137 }
138
139
140
141 @Override
142 public Object visit(OWLDataProperty p) {
143 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
144 return null;
145 }
146
147
148 @Override
149 public Object visit(OWLObjectOneOf desc) {
150 if (desc.getIndividuals().size() != 1) {
151 profileViolations.add(new UseOfObjectOneOfWithMultipleIndividuals(getCurrentOntology(), getCurrentAxiom(), desc));
152 }
153 return null;
154 }
155
156 @Override
157 public Object visit(OWLDataHasValue desc) {
158 profileViolations.add(new UseOfIllegalClassExpression(getCurrentOntology(), getCurrentAxiom(), desc));
159 return null;
160 }
161
162 @Override
163 public Object visit(OWLDataSomeValuesFrom desc) {
164 profileViolations.add(new UseOfIllegalClassExpression(getCurrentOntology(), getCurrentAxiom(), desc));
165 return null;
166 }
167
168 @Override
169 public Object visit(OWLDataIntersectionOf desc) {
170 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
171 return null;
172 }
173
174 @Override
175 public Object visit(OWLSubDataPropertyOfAxiom desc) {
176 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
177 return null;
178 }
179
180 @Override
181 public Object visit(OWLEquivalentDataPropertiesAxiom desc) {
182 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
183 return null;
184 }
185
186 @Override
187 public Object visit(OWLTransitiveObjectPropertyAxiom desc) {
188 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
189 return null;
190 }
191
192 @Override
193 public Object visit(OWLReflexiveObjectPropertyAxiom desc) {
194 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
195 return null;
196 }
197
198 @Override
199 public Object visit(OWLDataPropertyDomainAxiom desc) {
200 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
201 return null;
202 }
203
204 @Override
205 public Object visit(OWLDataPropertyRangeAxiom desc) {
206 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
207 return null;
208 }
209
210
211
212 @Override
213 public Object visit(OWLDataPropertyAssertionAxiom desc) {
214 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
215 return null;
216 }
217
218 @Override
219 public Object visit(OWLNegativeDataPropertyAssertionAxiom desc) {
220 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
221 return null;
222 }
223
224 @Override
225 public Object visit(OWLNegativeObjectPropertyAssertionAxiom desc) {
226 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
227 return null;
228 }
229
230 @Override
231 public Object visit(OWLFunctionalDataPropertyAxiom desc) {
232 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
233 return null;
234 }
235
236 @Override
237 public Object visit(OWLHasKeyAxiom desc) {
238 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
239 return null;
240 }
241
242
243
244
245 @Override
246 public Object visit(OWLObjectHasSelf node) {
247 profileViolations.add(new UseOfIllegalClassExpression(getCurrentOntology(), getCurrentAxiom(), node));
248 return null;
249 }
250
251
252 @Override
253 public Object visit(OWLDataOneOf node) {
254 profileViolations.add(new UseOfDataOneOfWithMultipleLiterals(getCurrentOntology(), getCurrentAxiom(), node));
255 return null;
256 }
257
258
259
260 @Override
261 public Object visit(OWLSubPropertyChainOfAxiom axiom) {
262 profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom()));
263
264 return null;
265 }
266
267 @Override
268 public Object visit(OWLOntology ontology) {
269 propertyManager = null;
270 return null;
271 }
272 }
273
274 @Override
275 public IRI getIRI() {
276 return null;
277 }
278
279
280}
281
diff --git a/src/org/semanticweb/karma2/profile/MyOWLOntologyWalker.java b/src/org/semanticweb/karma2/profile/MyOWLOntologyWalker.java
new file mode 100644
index 0000000..508632f
--- /dev/null
+++ b/src/org/semanticweb/karma2/profile/MyOWLOntologyWalker.java
@@ -0,0 +1,913 @@
1package org.semanticweb.karma2.profile;
2
3import java.util.ArrayList;
4import java.util.Collection;
5import java.util.HashSet;
6import java.util.Set;
7
8import org.semanticweb.owlapi.model.IRI;
9import org.semanticweb.owlapi.model.OWLAnnotation;
10import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom;
11import org.semanticweb.owlapi.model.OWLAnnotationProperty;
12import org.semanticweb.owlapi.model.OWLAnnotationPropertyDomainAxiom;
13import org.semanticweb.owlapi.model.OWLAnnotationPropertyRangeAxiom;
14import org.semanticweb.owlapi.model.OWLAnonymousIndividual;
15import org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom;
16import org.semanticweb.owlapi.model.OWLAxiom;
17import org.semanticweb.owlapi.model.OWLClass;
18import org.semanticweb.owlapi.model.OWLClassAssertionAxiom;
19import org.semanticweb.owlapi.model.OWLClassExpression;
20import org.semanticweb.owlapi.model.OWLDataAllValuesFrom;
21import org.semanticweb.owlapi.model.OWLDataComplementOf;
22import org.semanticweb.owlapi.model.OWLDataExactCardinality;
23import org.semanticweb.owlapi.model.OWLDataHasValue;
24import org.semanticweb.owlapi.model.OWLDataIntersectionOf;
25import org.semanticweb.owlapi.model.OWLDataMaxCardinality;
26import org.semanticweb.owlapi.model.OWLDataMinCardinality;
27import org.semanticweb.owlapi.model.OWLDataOneOf;
28import org.semanticweb.owlapi.model.OWLDataProperty;
29import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom;
30import org.semanticweb.owlapi.model.OWLDataPropertyDomainAxiom;
31import org.semanticweb.owlapi.model.OWLDataPropertyExpression;
32import org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom;
33import org.semanticweb.owlapi.model.OWLDataRange;
34import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom;
35import org.semanticweb.owlapi.model.OWLDataUnionOf;
36import org.semanticweb.owlapi.model.OWLDatatype;
37import org.semanticweb.owlapi.model.OWLDatatypeDefinitionAxiom;
38import org.semanticweb.owlapi.model.OWLDatatypeRestriction;
39import org.semanticweb.owlapi.model.OWLDeclarationAxiom;
40import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom;
41import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom;
42import org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom;
43import org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom;
44import org.semanticweb.owlapi.model.OWLDisjointUnionAxiom;
45import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom;
46import org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom;
47import org.semanticweb.owlapi.model.OWLEquivalentObjectPropertiesAxiom;
48import org.semanticweb.owlapi.model.OWLFacetRestriction;
49import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom;
50import org.semanticweb.owlapi.model.OWLFunctionalObjectPropertyAxiom;
51import org.semanticweb.owlapi.model.OWLHasKeyAxiom;
52import org.semanticweb.owlapi.model.OWLIndividual;
53import org.semanticweb.owlapi.model.OWLInverseFunctionalObjectPropertyAxiom;
54import org.semanticweb.owlapi.model.OWLInverseObjectPropertiesAxiom;
55import org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom;
56import org.semanticweb.owlapi.model.OWLLiteral;
57import org.semanticweb.owlapi.model.OWLNamedIndividual;
58import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom;
59import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom;
60import org.semanticweb.owlapi.model.OWLObject;
61import org.semanticweb.owlapi.model.OWLObjectAllValuesFrom;
62import org.semanticweb.owlapi.model.OWLObjectComplementOf;
63import org.semanticweb.owlapi.model.OWLObjectExactCardinality;
64import org.semanticweb.owlapi.model.OWLObjectHasSelf;
65import org.semanticweb.owlapi.model.OWLObjectHasValue;
66import org.semanticweb.owlapi.model.OWLObjectIntersectionOf;
67import org.semanticweb.owlapi.model.OWLObjectInverseOf;
68import org.semanticweb.owlapi.model.OWLObjectMaxCardinality;
69import org.semanticweb.owlapi.model.OWLObjectMinCardinality;
70import org.semanticweb.owlapi.model.OWLObjectOneOf;
71import org.semanticweb.owlapi.model.OWLObjectProperty;
72import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom;
73import org.semanticweb.owlapi.model.OWLObjectPropertyDomainAxiom;
74import org.semanticweb.owlapi.model.OWLObjectPropertyExpression;
75import org.semanticweb.owlapi.model.OWLObjectPropertyRangeAxiom;
76import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom;
77import org.semanticweb.owlapi.model.OWLObjectUnionOf;
78import org.semanticweb.owlapi.model.OWLObjectVisitor;
79import org.semanticweb.owlapi.model.OWLObjectVisitorEx;
80import org.semanticweb.owlapi.model.OWLOntology;
81import org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom;
82import org.semanticweb.owlapi.model.OWLSameIndividualAxiom;
83import org.semanticweb.owlapi.model.OWLSubAnnotationPropertyOfAxiom;
84import org.semanticweb.owlapi.model.OWLSubClassOfAxiom;
85import org.semanticweb.owlapi.model.OWLSubDataPropertyOfAxiom;
86import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom;
87import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom;
88import org.semanticweb.owlapi.model.OWLSymmetricObjectPropertyAxiom;
89import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom;
90import org.semanticweb.owlapi.model.SWRLAtom;
91import org.semanticweb.owlapi.model.SWRLBuiltInAtom;
92import org.semanticweb.owlapi.model.SWRLClassAtom;
93import org.semanticweb.owlapi.model.SWRLDArgument;
94import org.semanticweb.owlapi.model.SWRLDataPropertyAtom;
95import org.semanticweb.owlapi.model.SWRLDataRangeAtom;
96import org.semanticweb.owlapi.model.SWRLDifferentIndividualsAtom;
97import org.semanticweb.owlapi.model.SWRLIndividualArgument;
98import org.semanticweb.owlapi.model.SWRLLiteralArgument;
99import org.semanticweb.owlapi.model.SWRLObjectPropertyAtom;
100import org.semanticweb.owlapi.model.SWRLRule;
101import org.semanticweb.owlapi.model.SWRLSameIndividualAtom;
102import org.semanticweb.owlapi.model.SWRLVariable;
103import org.semanticweb.owlapi.util.OWLOntologyWalker;
104
105
106public class MyOWLOntologyWalker extends OWLOntologyWalker {
107
108 private final Collection<OWLOntology> ontologies;
109
110 /**
111 * @param objects the set of objects to visit
112 */
113 public MyOWLOntologyWalker(Set<OWLOntology> objects) {
114 this(objects, true);
115 }
116 /**
117	 * @param objects the set of objects to visit
118	 * @param visitDuplicates true if duplicates should be visited
119 */
120 public MyOWLOntologyWalker(Set<OWLOntology> objects, boolean visitDuplicates) {
121 super(objects);
122 this.ontologies = new ArrayList<OWLOntology>(objects);
123 }
124
125 /**
126 * @param v visitor to use over the objects
127 */
128 public void walkStructure(OWLObjectVisitorEx<?> v) {
129 this.visitor = v;
130 StructureWalker walker = new StructureWalker();
131 for (OWLOntology o : ontologies) {
132 o.accept(walker);
133 }
134 }
135
136 private class StructureWalker implements OWLObjectVisitor {
137
138 private final Set<OWLObject> visited = new HashSet<OWLObject>();
139
140 public StructureWalker() {}
141
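		// Forwards each object to the configured visitor; when duplicate
		// visits are disabled, an object already seen is skipped.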
142 private void process(OWLObject object) {
143 if (!visitDuplicates) {
144 if (!visited.contains(object)) {
145 visited.add(object);
146 object.accept(visitor);
147 }
148 }
149 else {
150 object.accept(visitor);
151 }
152 }
153
154 @Override
155 public void visit(IRI iri) {
156 process(iri);
157 }
158
159 @Override
160 public void visit(OWLOntology ontologyToVisit) {
161 MyOWLOntologyWalker.this.ontology = ontologyToVisit;
162 MyOWLOntologyWalker.this.ax = null;
163 process(ontologyToVisit);
164 for (OWLAnnotation anno : ontologyToVisit.getAnnotations()) {
165 anno.accept(this);
166 }
167 for (OWLAxiom a : ontologyToVisit.getAxioms()) {
168 a.accept(this);
169 }
170 }
171
172
173 @Override
174 public void visit(OWLAsymmetricObjectPropertyAxiom axiom) {
175 MyOWLOntologyWalker.this.ax = axiom;
176 process(axiom);
177 axiom.getProperty().accept(this);
178 }
179
180
181 @Override
182 public void visit(OWLClassAssertionAxiom axiom) {
183 MyOWLOntologyWalker.this.ax = axiom;
184 process(axiom);
185 axiom.getIndividual().accept(this);
186 axiom.getClassExpression().accept(this);
187 }
188
189
190 @Override
191 public void visit(OWLDataPropertyAssertionAxiom axiom) {
192 MyOWLOntologyWalker.this.ax = axiom;
193 process(axiom);
194 axiom.getSubject().accept(this);
195 axiom.getProperty().accept(this);
196 axiom.getObject().accept(this);
197 }
198
199
200 @Override
201 public void visit(OWLDataPropertyDomainAxiom axiom) {
202 MyOWLOntologyWalker.this.ax = axiom;
203 process(axiom);
204 axiom.getDomain().accept(this);
205 axiom.getProperty().accept(this);
206 }
207
208
209 @Override
210 public void visit(OWLDataPropertyRangeAxiom axiom) {
211 MyOWLOntologyWalker.this.ax = axiom;
212 process(axiom);
213 axiom.getRange().accept(this);
214 axiom.getProperty().accept(this);
215 }
216
217
218 @Override
219 public void visit(OWLSubDataPropertyOfAxiom axiom) {
220 MyOWLOntologyWalker.this.ax = axiom;
221 process(axiom);
222 axiom.getSubProperty().accept(this);
223 axiom.getSuperProperty().accept(this);
224 }
225
226
227 @Override
228 public void visit(OWLDeclarationAxiom axiom) {
229 MyOWLOntologyWalker.this.ax = axiom;
230 process(axiom);
231 axiom.getEntity().accept(this);
232 }
233
234
235 @Override
236 public void visit(OWLDifferentIndividualsAxiom axiom) {
237 MyOWLOntologyWalker.this.ax = axiom;
238 process(axiom);
239 for (OWLIndividual ind : axiom.getIndividuals()) {
240 ind.accept(this);
241 }
242 }
243
244
245 @Override
246 public void visit(OWLDisjointClassesAxiom axiom) {
247 MyOWLOntologyWalker.this.ax = axiom;
248 process(axiom);
249 for (OWLClassExpression desc : axiom.getClassExpressions()) {
250 desc.accept(this);
251 }
252 }
253
254
255 @Override
256 public void visit(OWLDisjointDataPropertiesAxiom axiom) {
257 MyOWLOntologyWalker.this.ax = axiom;
258 process(axiom);
259 for (OWLDataPropertyExpression prop : axiom.getProperties()) {
260 prop.accept(this);
261 }
262 }
263
264
265 @Override
266 public void visit(OWLDisjointObjectPropertiesAxiom axiom) {
267 MyOWLOntologyWalker.this.ax = axiom;
268 process(axiom);
269 for (OWLObjectPropertyExpression prop : axiom.getProperties()) {
270 prop.accept(this);
271 }
272 }
273
274
275 @Override
276 public void visit(OWLDisjointUnionAxiom axiom) {
277 MyOWLOntologyWalker.this.ax = axiom;
278 process(axiom);
279 axiom.getOWLClass().accept(this);
280 for (OWLClassExpression desc : axiom.getClassExpressions()) {
281 desc.accept(this);
282 }
283 }
284
285
286 @Override
287 public void visit(OWLAnnotationAssertionAxiom axiom) {
288 MyOWLOntologyWalker.this.ax = axiom;
289 process(axiom);
290 axiom.getSubject().accept(this);
291 axiom.getAnnotation().accept(this);
292 }
293
294 @Override
295 public void visit(OWLAnnotationPropertyDomainAxiom axiom) {
296 MyOWLOntologyWalker.this.ax = axiom;
297 process(axiom);
298 axiom.getProperty().accept(this);
299 axiom.getDomain().accept(this);
300 }
301
302 @Override
303 public void visit(OWLAnnotationPropertyRangeAxiom axiom) {
304 MyOWLOntologyWalker.this.ax = axiom;
305 process(axiom);
306 axiom.getProperty().accept(this);
307 axiom.getRange().accept(this);
308 }
309
310 @Override
311 public void visit(OWLSubAnnotationPropertyOfAxiom axiom) {
312 MyOWLOntologyWalker.this.ax = axiom;
313 process(axiom);
314 axiom.getSubProperty().accept(this);
315 axiom.getSuperProperty().accept(this);
316 }
317
318 @Override
319 public void visit(OWLAnnotation node) {
320 process(node);
321 annotation = node;
322 node.getProperty().accept(this);
323 node.getValue().accept(this);
324 }
325
326 @Override
327 public void visit(OWLEquivalentClassesAxiom axiom) {
328 MyOWLOntologyWalker.this.ax = axiom;
329 process(axiom);
330 for (OWLClassExpression desc : axiom.getClassExpressions()) {
331 desc.accept(this);
332 }
333 }
334
335
336 @Override
337 public void visit(OWLEquivalentDataPropertiesAxiom axiom) {
338 MyOWLOntologyWalker.this.ax = axiom;
339 process(axiom);
340 for (OWLDataPropertyExpression prop : axiom.getProperties()) {
341 prop.accept(this);
342 }
343 }
344
345
346 @Override
347 public void visit(OWLEquivalentObjectPropertiesAxiom axiom) {
348 MyOWLOntologyWalker.this.ax = axiom;
349 process(axiom);
350 for (OWLObjectPropertyExpression prop : axiom.getProperties()) {
351 prop.accept(this);
352 }
353 }
354
355
356 @Override
357 public void visit(OWLFunctionalDataPropertyAxiom axiom) {
358 MyOWLOntologyWalker.this.ax = axiom;
359 process(axiom);
360 axiom.getProperty().accept(this);
361 }
362
363
364 @Override
365 public void visit(OWLFunctionalObjectPropertyAxiom axiom) {
366 MyOWLOntologyWalker.this.ax = axiom;
367 process(axiom);
368 axiom.getProperty().accept(this);
369 }
370
371 @Override
372 public void visit(OWLInverseFunctionalObjectPropertyAxiom axiom) {
373 MyOWLOntologyWalker.this.ax = axiom;
374 process(axiom);
375 axiom.getProperty().accept(this);
376 }
377
378
379 @Override
380 public void visit(OWLInverseObjectPropertiesAxiom axiom) {
381 MyOWLOntologyWalker.this.ax = axiom;
382 process(axiom);
383 axiom.getFirstProperty().accept(this);
384 axiom.getSecondProperty().accept(this);
385 }
386
387
388 @Override
389 public void visit(OWLIrreflexiveObjectPropertyAxiom axiom) {
390 MyOWLOntologyWalker.this.ax = axiom;
391 process(axiom);
392 axiom.getProperty().accept(this);
393 }
394
395
396 @Override
397 public void visit(OWLNegativeDataPropertyAssertionAxiom axiom) {
398 MyOWLOntologyWalker.this.ax = axiom;
399 process(axiom);
400 axiom.getSubject().accept(this);
401 axiom.getProperty().accept(this);
402 axiom.getObject().accept(this);
403 }
404
405
406 @Override
407 public void visit(OWLNegativeObjectPropertyAssertionAxiom axiom) {
408 MyOWLOntologyWalker.this.ax = axiom;
409 process(axiom);
410 axiom.getSubject().accept(this);
411 axiom.getProperty().accept(this);
412 axiom.getObject().accept(this);
413 }
414
415
416 @Override
417 public void visit(OWLObjectPropertyAssertionAxiom axiom) {
418 MyOWLOntologyWalker.this.ax = axiom;
419 process(axiom);
420 axiom.getSubject().accept(this);
421 axiom.getProperty().accept(this);
422 axiom.getObject().accept(this);
423 }
424
425
426 @Override
427 public void visit(OWLSubPropertyChainOfAxiom axiom) {
428 MyOWLOntologyWalker.this.ax = axiom;
429 process(axiom);
430 for (OWLObjectPropertyExpression prop : axiom.getPropertyChain()) {
431 prop.accept(this);
432 }
433 axiom.getSuperProperty().accept(this);
434 }
435
436
437 @Override
438 public void visit(OWLObjectPropertyDomainAxiom axiom) {
439 MyOWLOntologyWalker.this.ax = axiom;
440 process(axiom);
441 axiom.getDomain().accept(this);
442 axiom.getProperty().accept(this);
443 }
444
445
446 @Override
447 public void visit(OWLObjectPropertyRangeAxiom axiom) {
448 MyOWLOntologyWalker.this.ax = axiom;
449 process(axiom);
450 axiom.getProperty().accept(this);
451 axiom.getRange().accept(this);
452 }
453
454
455 @Override
456 public void visit(OWLSubObjectPropertyOfAxiom axiom) {
457 MyOWLOntologyWalker.this.ax = axiom;
458 process(axiom);
459 axiom.getSubProperty().accept(this);
460 axiom.getSuperProperty().accept(this);
461 }
462
463
464 @Override
465 public void visit(OWLReflexiveObjectPropertyAxiom axiom) {
466 MyOWLOntologyWalker.this.ax = axiom;
467 process(axiom);
468 axiom.getProperty().accept(this);
469 }
470
471
472 @Override
473 public void visit(OWLSameIndividualAxiom axiom) {
474 MyOWLOntologyWalker.this.ax = axiom;
475 process(axiom);
476 for (OWLIndividual ind : axiom.getIndividuals()) {
477 ind.accept(this);
478 }
479 }
480
481
482 @Override
483 public void visit(OWLSubClassOfAxiom axiom) {
484 MyOWLOntologyWalker.this.ax = axiom;
485 process(axiom);
486 // -ve polarity
487 axiom.getSubClass().accept(this);
488 // +ve polarity
489 axiom.getSuperClass().accept(this);
490 }
491
492
493 @Override
494 public void visit(OWLSymmetricObjectPropertyAxiom axiom) {
495 MyOWLOntologyWalker.this.ax = axiom;
496 process(axiom);
497 axiom.getProperty().accept(this);
498 }
499
500
501 @Override
502 public void visit(OWLTransitiveObjectPropertyAxiom axiom) {
503 MyOWLOntologyWalker.this.ax = axiom;
504 process(axiom);
505 axiom.getProperty().accept(this);
506 }
507
508
509 @Override
510 public void visit(SWRLRule rule) {
511 MyOWLOntologyWalker.this.ax = rule;
512 process(rule);
513 for (SWRLAtom at : rule.getBody()) {
514 at.accept(this);
515 }
516 for (SWRLAtom at : rule.getHead()) {
517 at.accept(this);
518 }
519 }
520
521 @Override
522 public void visit(OWLHasKeyAxiom axiom) {
523 MyOWLOntologyWalker.this.ax = axiom;
524 process(axiom);
525 axiom.getClassExpression().accept(this);
526 for (OWLObjectPropertyExpression prop : axiom.getObjectPropertyExpressions()) {
527 prop.accept(this);
528 }
529 for (OWLDataPropertyExpression prop : axiom.getDataPropertyExpressions()) {
530 prop.accept(this);
531 }
532 }
533
534 @Override
535 public void visit(OWLClass desc) {
536 pushClassExpression(desc);
537 process(desc);
538 desc.getIRI().accept(this);
539 popClassExpression();
540 }
541
542
543 @Override
544 public void visit(OWLDataAllValuesFrom desc) {
545 pushClassExpression(desc);
546 process(desc);
547 desc.getProperty().accept(this);
548 desc.getFiller().accept(this);
549 popClassExpression();
550 }
551
552
553 @Override
554 public void visit(OWLDataExactCardinality desc) {
555 pushClassExpression(desc);
556 process(desc);
557 desc.getProperty().accept(this);
558 desc.getFiller().accept(this);
559 popClassExpression();
560 }
561
562
563 @Override
564 public void visit(OWLDataMaxCardinality desc) {
565 pushClassExpression(desc);
566 process(desc);
567 desc.getProperty().accept(this);
568 desc.getFiller().accept(this);
569 popClassExpression();
570 }
571
572
573 @Override
574 public void visit(OWLDataMinCardinality desc) {
575 pushClassExpression(desc);
576 process(desc);
577 desc.getProperty().accept(this);
578 desc.getFiller().accept(this);
579 popClassExpression();
580 }
581
582
583 @Override
584 public void visit(OWLDataSomeValuesFrom desc) {
585 pushClassExpression(desc);
586 process(desc);
587 desc.getProperty().accept(this);
588 desc.getFiller().accept(this);
589 popClassExpression();
590 }
591
592
593 @Override
594 public void visit(OWLDataHasValue desc) {
595 pushClassExpression(desc);
596 process(desc);
597 desc.getProperty().accept(this);
598 desc.getValue().accept(this);
599 popClassExpression();
600 }
601
602
603 @Override
604 public void visit(OWLObjectAllValuesFrom desc) {
605 pushClassExpression(desc);
606 process(desc);
607 desc.getProperty().accept(this);
608 desc.getFiller().accept(this);
609 popClassExpression();
610 }
611
612
613 @Override
614 public void visit(OWLObjectComplementOf desc) {
615 pushClassExpression(desc);
616 process(desc);
617 desc.getOperand().accept(this);
618 popClassExpression();
619 }
620
621
622 @Override
623 public void visit(OWLObjectExactCardinality desc) {
624 pushClassExpression(desc);
625 process(desc);
626 desc.getProperty().accept(this);
627 desc.getFiller().accept(this);
628 popClassExpression();
629 }
630
631
632 @Override
633 public void visit(OWLObjectIntersectionOf desc) {
634 pushClassExpression(desc);
635 process(desc);
636
637 for (OWLClassExpression op : desc.getOperands()) {
638 op.accept(this);
639 }
640 popClassExpression();
641 }
642
643
644 @Override
645 public void visit(OWLObjectMaxCardinality desc) {
646 pushClassExpression(desc);
647 process(desc);
648 desc.getProperty().accept(this);
649 desc.getFiller().accept(this);
650 popClassExpression();
651 }
652
653
654 @Override
655 public void visit(OWLObjectMinCardinality desc) {
656 pushClassExpression(desc);
657 process(desc);
658 desc.getProperty().accept(this);
659 desc.getFiller().accept(this);
660 popClassExpression();
661 }
662
663
664 @Override
665 public void visit(OWLObjectOneOf desc) {
666 pushClassExpression(desc);
667 process(desc);
668 for (OWLIndividual ind : desc.getIndividuals()) {
669 ind.accept(this);
670 }
671 popClassExpression();
672 }
673
674
675 @Override
676 public void visit(OWLObjectHasSelf desc) {
677 pushClassExpression(desc);
678 process(desc);
679 desc.getProperty().accept(this);
680 popClassExpression();
681 }
682
683
684 @Override
685 public void visit(OWLObjectSomeValuesFrom desc) {
686 pushClassExpression(desc);
687 process(desc);
688 desc.getProperty().accept(this);
689 desc.getFiller().accept(this);
690 popClassExpression();
691 }
692
693
694 @Override
695 public void visit(OWLObjectUnionOf desc) {
696 pushClassExpression(desc);
697 process(desc);
698 for (OWLClassExpression op : desc.getOperands()) {
699 op.accept(this);
700 }
701 popClassExpression();
702 }
703
704
705 @Override
706 public void visit(OWLObjectHasValue desc) {
707 pushClassExpression(desc);
708 process(desc);
709 desc.getProperty().accept(this);
710 desc.getValue().accept(this);
711 popClassExpression();
712 }
713
714
715 @Override
716 public void visit(OWLDataComplementOf node) {
717 pushDataRange(node);
718 process(node);
719 node.getDataRange().accept(this);
720 popDataRange();
721 }
722
723
724 @Override
725 public void visit(OWLDataOneOf node) {
726 pushDataRange(node);
727 process(node);
728 for (OWLLiteral con : node.getValues()) {
729 con.accept(this);
730 }
731 popDataRange();
732 }
733
734 @Override
735 public void visit(OWLDataIntersectionOf node) {
736 pushDataRange(node);
737 process(node);
738 for (OWLDataRange rng : node.getOperands()) {
739 rng.accept(this);
740 }
741 popDataRange();
742 }
743
744 @Override
745 public void visit(OWLDataUnionOf node) {
746 pushDataRange(node);
747 process(node);
748 for (OWLDataRange rng : node.getOperands()) {
749 rng.accept(this);
750 }
751 popDataRange();
752 }
753
754 @Override
755 public void visit(OWLFacetRestriction node) {
756 process(node);
757 node.getFacetValue().accept(this);
758 }
759
760
761 @Override
762 public void visit(OWLDatatypeRestriction node) {
763 pushDataRange(node);
764 process(node);
765 node.getDatatype().accept(this);
766 for (OWLFacetRestriction fr : node.getFacetRestrictions()) {
767 fr.accept(this);
768 }
769 popDataRange();
770 }
771
772
773 @Override
774 public void visit(OWLDatatype node) {
775 pushDataRange(node);
776 process(node);
777 popDataRange();
778 }
779
780 @Override
781 public void visit(OWLLiteral node) {
782 process(node);
783 node.getDatatype().accept(this);
784 popDataRange();
785 }
786
787 @Override
788 public void visit(OWLAnnotationProperty property) {
789 process(property);
790 property.getIRI().accept(this);
791 }
792
793 @Override
794 public void visit(OWLDataProperty property) {
795 process(property);
796 property.getIRI().accept(this);
797 }
798
799
800 @Override
801 public void visit(OWLObjectProperty property) {
802 process(property);
803 property.getIRI().accept(this);
804 }
805
806
807 @Override
808 public void visit(OWLObjectInverseOf property) {
809 process(property);
810 property.getInverse().accept(this);
811 }
812
813
814 @Override
815 public void visit(OWLNamedIndividual individual) {
816 process(individual);
817 individual.getIRI().accept(this);
818 }
819
820 @Override
821 public void visit(OWLAnonymousIndividual individual) {
822 process(individual);
823 }
824
825 @Override
826 public void visit(SWRLLiteralArgument node) {
827 process(node);
828 node.getLiteral().accept(this);
829 }
830
831
832 @Override
833 public void visit(SWRLVariable node) {
834 process(node);
835 }
836
837
838 @Override
839 public void visit(SWRLIndividualArgument node) {
840 process(node);
841 node.getIndividual().accept(this);
842 }
843
844
845 @Override
846 public void visit(SWRLBuiltInAtom node) {
847 process(node);
848 for (SWRLDArgument at : node.getArguments()) {
849 at.accept(this);
850 }
851 }
852
853
854 @Override
855 public void visit(SWRLClassAtom node) {
856 process(node);
857 node.getArgument().accept(this);
858 node.getPredicate().accept(this);
859 }
860
861
862 @Override
863 public void visit(SWRLDataRangeAtom node) {
864 process(node);
865 node.getArgument().accept(this);
866 node.getPredicate().accept(this);
867 }
868
869
870 @Override
871 public void visit(SWRLDataPropertyAtom node) {
872 process(node);
873 node.getPredicate().accept(this);
874 node.getFirstArgument().accept(this);
875 node.getSecondArgument().accept(this);
876 }
877
878
879 @Override
880 public void visit(SWRLDifferentIndividualsAtom node) {
881 process(node);
882 node.getFirstArgument().accept(this);
883 node.getSecondArgument().accept(this);
884 }
885
886
887 @Override
888 public void visit(SWRLObjectPropertyAtom node) {
889 process(node);
890 node.getPredicate().accept(this);
891 node.getFirstArgument().accept(this);
892 node.getSecondArgument().accept(this);
893 }
894
895
896 @Override
897 public void visit(SWRLSameIndividualAtom node) {
898 process(node);
899 node.getFirstArgument().accept(this);
900 node.getSecondArgument().accept(this);
901 }
902
903
904 @Override
905 public void visit(OWLDatatypeDefinitionAxiom axiom) {
906 MyOWLOntologyWalker.this.ax = axiom;
907 process(axiom);
908 axiom.getDatatype().accept(this);
909 axiom.getDataRange().accept(this);
910 }
911 }
912}
913
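The visitor above follows the OWL API's structure-walking pattern: every composite class expression or data range is pushed onto a stack, handed to process, its children are visited recursively, and it is popped again, so process always sees the path from the current axiom down to the current sub-expression. A minimal sketch of how such a walker is typically driven, using the stock OWL API 3.x OWLOntologyWalker for illustration (the input path is a placeholder):

    import java.io.File;
    import java.util.Collections;
    import org.semanticweb.owlapi.apibinding.OWLManager;
    import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom;
    import org.semanticweb.owlapi.model.OWLOntology;
    import org.semanticweb.owlapi.util.OWLOntologyWalker;
    import org.semanticweb.owlapi.util.OWLOntologyWalkerVisitor;

    public class WalkerSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical input: any ontology document the OWL API can load.
            OWLOntology ontology = OWLManager.createOWLOntologyManager()
                    .loadOntologyFromOntologyDocument(new File(args[0]));
            OWLOntologyWalker walker = new OWLOntologyWalker(Collections.singleton(ontology));
            walker.walkStructure(new OWLOntologyWalkerVisitor<Object>(walker) {
                @Override
                public Object visit(OWLObjectSomeValuesFrom desc) {
                    // Invoked once per existential restriction, with the enclosing axiom available.
                    System.out.println(desc + " occurs in " + getCurrentAxiom());
                    return null;
                }
            });
        }
    }
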
diff --git a/src/org/semanticweb/simpleETL/RDFHandlerWriter.java b/src/org/semanticweb/simpleETL/RDFHandlerWriter.java
new file mode 100644
index 0000000..e5e2e2a
--- /dev/null
+++ b/src/org/semanticweb/simpleETL/RDFHandlerWriter.java
@@ -0,0 +1,45 @@
1package org.semanticweb.simpleETL;
2import org.openrdf.model.Statement;
3import org.openrdf.rio.RDFHandler;
4import org.openrdf.rio.RDFHandlerException;
5import org.openrdf.rio.RDFWriter;
6
7
8public class RDFHandlerWriter implements RDFHandler {
9 protected RDFWriter m_writer;
10 protected boolean m_started;
11
12 public RDFHandlerWriter(RDFWriter writer){
13 m_writer = writer;
14 m_started = false;
15 }
16
17 @Override
18 public void endRDF() throws RDFHandlerException {
19		// Intentionally empty: endRDF is suppressed so that several parser runs can stream into one output document; the caller ends the underlying writer once, after the last input has been parsed
20 }
21
22 @Override
23 public void handleComment(String arg0) throws RDFHandlerException {
24 m_writer.handleComment(arg0);
25
26 }
27
28 @Override
29 public void handleNamespace(String arg0, String arg1) throws RDFHandlerException {
30 m_writer.handleNamespace(arg0, arg1);
31 }
32
33 @Override
34 public void handleStatement(Statement arg0) throws RDFHandlerException {
35 m_writer.handleStatement(arg0);
36 }
37
38 @Override
39 public void startRDF() throws RDFHandlerException {
40 if(!m_started) {
41 m_started = true;
42 m_writer.startRDF();
43 }
44 }
45}
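RDFHandlerWriter is a thin adaptor: it forwards statements, namespaces and comments from a Sesame RDFParser to an RDFWriter, fires startRDF only once, and swallows endRDF, so several parser runs can be funnelled into a single output document. A minimal usage sketch with hypothetical file names and base IRIs (the Sesame classes are the same ones imported by SimpleETL below):

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import org.openrdf.rio.RDFParser;
    import org.openrdf.rio.RDFWriter;
    import org.openrdf.rio.rdfxml.RDFXMLParser;
    import org.openrdf.rio.turtle.TurtleWriter;
    import org.semanticweb.simpleETL.RDFHandlerWriter;

    public class MergeSketch {
        public static void main(String[] args) throws Exception {
            RDFWriter writer = new TurtleWriter(new FileOutputStream("merged.ttl"));
            RDFParser parser = new RDFXMLParser();
            parser.setRDFHandler(new RDFHandlerWriter(writer));
            // Each parse call fires startRDF/endRDF; the handler ignores repeats of the former and all of the latter.
            parser.parse(new FileInputStream("a.owl"), "http://example.org/a#");
            parser.parse(new FileInputStream("b.owl"), "http://example.org/b#");
            writer.endRDF();   // ended once, explicitly, after the last input
        }
    }
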
diff --git a/src/org/semanticweb/simpleETL/SimpleETL.java b/src/org/semanticweb/simpleETL/SimpleETL.java
new file mode 100644
index 0000000..4d4a193
--- /dev/null
+++ b/src/org/semanticweb/simpleETL/SimpleETL.java
@@ -0,0 +1,71 @@
1package org.semanticweb.simpleETL;
2import java.io.File;
3import java.io.FileInputStream;
4import java.io.FileOutputStream;
5import java.util.regex.Pattern;
6
7import org.openrdf.rio.RDFParser;
8import org.openrdf.rio.RDFWriter;
9import org.openrdf.rio.rdfxml.RDFXMLParser;
10import org.openrdf.rio.turtle.TurtleWriter;
11
12import uk.ac.ox.cs.pagoda.util.Utility;
13
14public class SimpleETL {
15
16 protected final static String m_prefix_LUBM = "http://www.lehigh.edu/~zhp2/2004/0401/univ-bench.owl#";
17 protected final static String m_prefix_UOBM = "http://semantics.crl.ibm.com/univ-bench-dl.owl#";
18	protected final static String m_prefix_FLY = "http://www.virtualflybrain.org/ontologies/individual_neurons/FC_neuron_GF_an.owl#";
19
20 String m_prefix;
21 String m_fileToImport;
22 String m_fileToExport;
23
24 public SimpleETL(String prefix, String fileToImport) {
25 m_prefix = prefix;
26 m_fileToImport = fileToImport;
27 m_fileToExport = m_fileToImport.replace(".owl", ".ttl");
28 }
29
30 public SimpleETL(String prefix, String fileToImport, String outPath) {
31 m_prefix = prefix;
32 m_fileToImport = fileToImport;
33 File file = new File(outPath);
34 if (file.exists() && file.isDirectory())
35 m_fileToExport = outPath + Utility.FILE_SEPARATOR + "data.ttl";
36 else
37 m_fileToExport = outPath;
38// + Utility.FILE_SEPARATOR + m_fileToImport.substring(m_fileToImport.lastIndexOf(Utility.FILE_SEPARATOR), m_fileToImport.lastIndexOf(".")) + ".ttl";
39 }
40
41 public void rewrite() throws Exception {
42// RDFParser parser = new TurtleParser();
43 RDFParser parser = new RDFXMLParser();
44
45 RDFWriter writer = new TurtleWriter(new FileOutputStream(m_fileToExport));
46
47// String m_fileToExport = m_fileToImport.replace(".owl", ".ntriple");
48// RDFWriter writer = new NTriplesWriter(new FileOutputStream(m_fileToExport));
49
50 RDFHandlerWriter multiHandler = new RDFHandlerWriter(writer);
51 parser.setRDFHandler(multiHandler);
52 File fileToImport = new File(m_fileToImport);
53 if(fileToImport.isDirectory()) {
54 for(File file : fileToImport.listFiles()) {
55				if(file.isFile() && (Pattern.matches(".*\\.owl", file.getName()) || Pattern.matches(".*\\.rdf", file.getName()))) {
56 Utility.logDebug("Parsing " + file.getName());
57 parser.parse(new FileInputStream(file), m_prefix);
58 }
59 }
60 }
61 else
62 parser.parse(new FileInputStream(fileToImport), m_prefix);
63 writer.endRDF();
64 Utility.logInfo("SimpleETL rewriting DONE",
65 "additional ontology data is saved in " + m_fileToExport + ".");
66 }
67
68 public String getExportedFile() {
69 return m_fileToExport;
70 }
71}
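SimpleETL ties the pieces together: it parses one .owl/.rdf file, or every .owl/.rdf file in a directory, with the Sesame RDF/XML parser and streams all triples into a single Turtle file, placed next to the input or under the given output path. A minimal invocation sketch with hypothetical paths (the prefix is the LUBM namespace already declared in the class; the calls belong in a method that declares throws Exception):

    // Single-file case: /data/lubm/univ0.owl is rewritten to /data/lubm/univ0.ttl.
    SimpleETL etl = new SimpleETL(
            "http://www.lehigh.edu/~zhp2/2004/0401/univ-bench.owl#",
            "/data/lubm/univ0.owl");
    etl.rewrite();
    System.out.println(etl.getExportedFile());

    // Directory case: pass an explicit output location via the three-argument
    // constructor; if /data/out is an existing directory the result is /data/out/data.ttl.
    new SimpleETL("http://www.lehigh.edu/~zhp2/2004/0401/univ-bench.owl#",
            "/data/lubm", "/data/out").rewrite();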