From 17bd9beaf7f358a44e5bf36a5855fe6727d506dc Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Tue, 10 May 2022 18:17:06 +0100 Subject: [pagoda] Move project to Scala This commit includes a few changes: - The repository still uses Maven to manage dependencies, but it is now a Scala project. - The code has been ported from OWLAPI 3.4.10 to 5.1.20. - A proof-of-concept program using both RSAComb and PAGOdA has been added. --- src/main/java/org/semanticweb/karma2/MyKarma.java | 469 ++++++ .../karma2/clausifier/OntologyProcesser.java | 572 +++++++ .../karma2/exception/ConstraintException.java | 17 + .../exception/IllegalInputOntologyException.java | 9 + .../exception/IllegalInputQueryException.java | 14 + .../karma2/exception/QueryExecutionException.java | 13 + .../semanticweb/karma2/model/ConjunctiveQuery.java | 80 + .../org/semanticweb/karma2/model/Equality.java | 36 + .../karma2/model/ExtendedConjunctiveQuery.java | 94 ++ .../karma2/model/cqparser/ConjunctiveQuery.g | 140 ++ .../karma2/model/cqparser/ConjunctiveQuery.tokens | 36 + .../model/cqparser/ConjunctiveQueryLexer.java | 814 ++++++++++ .../model/cqparser/ConjunctiveQueryParser.java | 1611 ++++++++++++++++++++ .../model/cqparser/ConjunctiveQueryWalker.java | 179 +++ .../semanticweb/karma2/profile/ELHOProfile.java | 261 ++++ .../karma2/profile/MyOWLOntologyWalker.java | 914 +++++++++ 16 files changed, 5259 insertions(+) create mode 100644 src/main/java/org/semanticweb/karma2/MyKarma.java create mode 100644 src/main/java/org/semanticweb/karma2/clausifier/OntologyProcesser.java create mode 100644 src/main/java/org/semanticweb/karma2/exception/ConstraintException.java create mode 100644 src/main/java/org/semanticweb/karma2/exception/IllegalInputOntologyException.java create mode 100644 src/main/java/org/semanticweb/karma2/exception/IllegalInputQueryException.java create mode 100644 src/main/java/org/semanticweb/karma2/exception/QueryExecutionException.java create mode 100644 src/main/java/org/semanticweb/karma2/model/ConjunctiveQuery.java create mode 100644 src/main/java/org/semanticweb/karma2/model/Equality.java create mode 100644 src/main/java/org/semanticweb/karma2/model/ExtendedConjunctiveQuery.java create mode 100644 src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g create mode 100644 src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.tokens create mode 100644 src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryLexer.java create mode 100644 src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryParser.java create mode 100644 src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryWalker.java create mode 100644 src/main/java/org/semanticweb/karma2/profile/ELHOProfile.java create mode 100644 src/main/java/org/semanticweb/karma2/profile/MyOWLOntologyWalker.java diff --git a/src/main/java/org/semanticweb/karma2/MyKarma.java b/src/main/java/org/semanticweb/karma2/MyKarma.java new file mode 100644 index 0000000..b2b4352 --- /dev/null +++ b/src/main/java/org/semanticweb/karma2/MyKarma.java @@ -0,0 +1,469 @@ +package org.semanticweb.karma2; + +import java.io.File; +import java.io.FileNotFoundException; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Scanner; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import
java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import org.jgrapht.DirectedGraph; +import org.jgrapht.alg.CycleDetector; +import org.jgrapht.graph.DefaultDirectedGraph; +import org.jgrapht.graph.DefaultEdge; +import org.semanticweb.HermiT.model.Atom; +import org.semanticweb.HermiT.model.Individual; +import org.semanticweb.HermiT.model.Term; +import org.semanticweb.karma2.exception.ConstraintException; +import org.semanticweb.karma2.model.ConjunctiveQuery; +import org.semanticweb.karma2.model.ExtendedConjunctiveQuery; + +import uk.ac.ox.cs.JRDFox.model.GroundTerm; +import uk.ac.ox.cs.JRDFox.store.DataStore; +import uk.ac.ox.cs.JRDFox.store.Parameters; +import uk.ac.ox.cs.JRDFox.Prefixes; +import uk.ac.ox.cs.JRDFox.JRDFStoreException; +import uk.ac.ox.cs.JRDFox.store.TupleIterator; +import uk.ac.ox.cs.pagoda.MyPrefixes; +import uk.ac.ox.cs.pagoda.query.AnswerTuple; +import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine; +import uk.ac.ox.cs.pagoda.util.Namespace; +import uk.ac.ox.cs.pagoda.util.Timer; +import uk.ac.ox.cs.pagoda.util.UFS; +import uk.ac.ox.cs.pagoda.util.Utility; + +public class MyKarma { + + private DataStore store; + + private Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes(); + private Parameters parameters = new Parameters(); + + public MyKarma() { + store = RDFoxQueryEngine.createDataStore(); + parameters.m_allAnswersInRoot = true; + parameters.m_useBushy = true; + } + + private UFS equalityGroups = null; + + public void computeEqualityGroups() { + if (equalityGroups != null) return ; + equalityGroups = new UFS(); + TupleIterator answers = null; + try { + Timer t = new Timer(); + answers = store.compileQuery("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + "?z . 
}", prefixes, parameters); + for (long multi = answers.open(); multi != 0; multi = answers.getNext()) { + if (answers.getResourceID(0) != answers.getResourceID(1)) + equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm); + } + Utility.logInfo("@Time to group individuals by equality: " + t.duration()); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } finally { + if (answers != null) answers.dispose(); + } + } + + public DataStore getStore() { + return store; + } + + public long getNumberOfFacts() throws JRDFStoreException { + return store.getTriplesCount(); + } + + public void initializeData(File dataFile) throws JRDFStoreException, + FileNotFoundException { + store.importTurtleFile(dataFile, prefixes); + } + + public void materialise(File ruleFile) throws JRDFStoreException, FileNotFoundException { + Timer t = new Timer(); + Scanner scanner = new Scanner(ruleFile); + String datalogProgram = scanner.useDelimiter("\\Z").next(); + scanner.close(); + store.clearRulesAndMakeFactsExplicit(); +// store.addRules(new String[] {datalogProgram}); + store.importRules(datalogProgram); + store.applyReasoning(); + Utility.logDebug("elho-lower-store finished its own materialisation in " + t.duration() + " seconds."); + } + + public Collection answerCQ(ConjunctiveQuery q, boolean isGround) { + return answerCQ(q, null, isGround); + } + + boolean m_multiThread = false; + + public void setConcurrence(boolean multiThread) { + this.m_multiThread = multiThread; + } + + public Set answerCQ(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) { + computeEqualityGroups(); + if (m_multiThread) + return answerCQ_multiThread(q, soundAnswerTuples, isGround); + else + return answerCQ_singleThread(q, soundAnswerTuples, isGround); + } + + private Set answerCQ_multiThread(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) { + Set> set = new HashSet>(); + ExtendedConjunctiveQuery qext = ExtendedConjunctiveQuery.computeExtension(q); + TupleIterator tupleIterator = null; + ExecutorService es = null; + try { + tupleIterator = store.compileQuery(qext.toString(), prefixes, parameters); + es = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); + AnswerTuple tuple; + for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) { + Map match = new HashMap(); + for (int i = 0; i < qext.getNumberOfAnswerTerms(); i++) { + match.put(qext.getAnswerTerm(i), tupleIterator.getGroundTerm(i)); + } + if ((tuple = contains(qext, soundAnswerTuples, match)) != null) + set.add(es.submit(new Spurious(qext, match, tuple, isGround))); + } + Set result = new HashSet(set.size()); + while(!set.isEmpty()) { + Iterator> it = set.iterator(); + while(it.hasNext()) { + Future isReady = it.next(); + if (isReady.isDone()) { + try { + tuple = isReady.get(); + if (tuple != null) + result.add(tuple); + it.remove(); + } catch (InterruptedException e) { + e.printStackTrace(); + } catch (ExecutionException e) { + e.printStackTrace(); + } + } + } + } + return result; + } catch (JRDFStoreException e1) { + e1.printStackTrace(); + return null; + } finally { + if (tupleIterator != null) tupleIterator.dispose(); + if (es != null) es.shutdown(); + } + } + + private Set answerCQ_singleThread(ConjunctiveQuery q, AnswerTuples soundAnswerTuples, boolean isGround) { + ExtendedConjunctiveQuery qext = ExtendedConjunctiveQuery.computeExtension(q); + boolean useBushyValue = parameters.m_useBushy, allAnswersInRootValue = 
parameters.m_allAnswersInRoot; + Set result = new HashSet(); + + TupleIterator tupleIterator = null; + try { + tupleIterator = store.compileQuery(qext.toString(), prefixes, parameters); + parameters.m_useBushy = false; + parameters.m_allAnswersInRoot = false; + + AnswerTuple tuple; + + for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) { + Map match = new HashMap(); + for (int i = 0; i < qext.getNumberOfAnswerTerms(); i++) { + match.put(qext.getAnswerTerm(i), tupleIterator.getGroundTerm(i)); + } + if (((tuple = contains(qext, soundAnswerTuples, match)) != null) && (new Spurious(qext, match, tuple, isGround).call()) != null) + result.add(tuple); + } + } catch (JRDFStoreException e) { + e.printStackTrace(); + return null; + } finally { + if (tupleIterator != null) tupleIterator.dispose(); + parameters.m_useBushy = useBushyValue; + parameters.m_allAnswersInRoot = allAnswersInRootValue; + } + return result; + } + + private AnswerTuple contains(ExtendedConjunctiveQuery qext, AnswerTuples answerTuples, Map match) { + GroundTerm[] terms = new GroundTerm[qext.getNumberOfRealAnswerTerms()]; + int index = 0; + for (Term t : qext.getRealAnswerTerms()) + terms[index++] = match.get(t); + AnswerTuple tuple = new AnswerTuple(terms); + if (answerTuples != null && answerTuples.contains(tuple)) return null; + return tuple; + } + + + class Spurious implements Callable { + private ExtendedConjunctiveQuery query; + private Map match; + private AnswerTuple tuple; + private boolean isGround; + + public Spurious(ExtendedConjunctiveQuery query, Map m, AnswerTuple t, boolean isGround) { + this.query = query; + this.match = m; + this.tuple = t; + this.isGround = isGround; + } + + public AnswerTuple call() { + if (isMappingAnswerVariablesToAuxiliary(query, match)); + else { + if (isGround) return tuple; + + EqualityConstraintRelation sim = new EqualityConstraintRelation(query, match); + try { + sim.computeRelation(); + if (areEqualityConstraintsSatisfiedByMatch(query, sim, match) + && !isCyclic(query, sim, match)) { + return tuple; + } + } catch (ConstraintException e) { + Utility.logError(e.toString()); + e.printStackTrace(); + return null; + } + } + return null; + } + + } + + private boolean isMappingAnswerVariablesToAuxiliary( + ExtendedConjunctiveQuery conjunctiveQuery, + Map match) { + for (Term ansQueryTerm : conjunctiveQuery.getRealAnswerTerms()) { + if (! 
(ansQueryTerm instanceof Individual)) { + GroundTerm datalog_term = match.get(ansQueryTerm); + if (isSyntacticAnonymous(datalog_term)) + return true; + } + } + return false; + } + + private boolean isCyclic(ExtendedConjunctiveQuery q, + EqualityConstraintRelation sim, Map match) { + DirectedGraph auxGraph = new DefaultDirectedGraph( + DefaultEdge.class); + for (Term queryTerm : q.getTerms()) { + if (!(queryTerm instanceof Individual) && isRealAnonymous(match.get(queryTerm))) + auxGraph.addVertex(sim.getRepresentative(queryTerm)); + } + for (Atom a : q.getAtoms()) + if (a.getArity() == 2 && !(a.getArgument(0) instanceof Individual) && !(a.getArgument(1) instanceof Individual)) + if (isRealAnonymous(match.get(a.getArgument(0))) && isRealAnonymous(match.get(a.getArgument(1)))) + auxGraph.addEdge(sim.getRepresentative(a.getArgument(0)), sim.getRepresentative(a.getArgument(0))); + return (new CycleDetector(auxGraph)).detectCycles(); + + } + + private boolean isRealAnonymous(GroundTerm datalog_t) { + if (!(datalog_t instanceof uk.ac.ox.cs.JRDFox.model.Individual)) return false; + uk.ac.ox.cs.JRDFox.model.Individual ind = (uk.ac.ox.cs.JRDFox.model.Individual) datalog_t; + if (!ind.getIRI().startsWith(Namespace.KARMA_ANONY)) return false; + + return equalityGroups.find(ind.getIRI()).contains(Namespace.KARMA_ANONY); + +// String query = "select ?x where { ?x <" + ind.getIRI() + ">. } "; +// TupleIterator tupleIterator; +// try { +// tupleIterator = store.compileQuery(query, prefixes, parameters); +// } catch (JRDFStoreException e) { +// e.printStackTrace(); +// return false; +// } +// +// try { +// GroundTerm t; +// for (long multi = tupleIterator.open(); multi != 0; multi = tupleIterator.getNext()) { +// t = tupleIterator.getGroundTerm(0); +// if (t instanceof uk.ac.ox.cs.JRDFox.model.Individual && !((uk.ac.ox.cs.JRDFox.model.Individual) t).isAnony) +// return false; +// } +// } catch (JRDFStoreException e) { +// e.printStackTrace(); +// return false; +// } finally { +// tupleIterator.dispose(); +// } +// return true; + } + + private boolean areEqualityConstraintsSatisfiedByMatch( + ExtendedConjunctiveQuery q, EqualityConstraintRelation sim, + Map m) throws ConstraintException { + for (Term s : q.getTerms()) + for (Term t : q.getTerms()) + if (sim.areConstraintToBeEqual(s, t)) { + if (!areMappedToEqualDatalogTerms(q, m, s, t)) + return false; + } + return true; + } + + private boolean areMappedToEqualDatalogTerms( + ExtendedConjunctiveQuery q, Map match, + Term queryTerm1, Term queryTerm2) { + GroundTerm datalogTerm1 = (queryTerm1 instanceof Individual) ? toRDFoxIndividual(queryTerm1) : match.get(queryTerm1); + GroundTerm datalogTerm2 = (queryTerm2 instanceof Individual) ? toRDFoxIndividual(queryTerm2) : match.get(queryTerm2); + if (datalogTerm1 != null && datalogTerm1.equals(datalogTerm2)) + return true; + + return equalityGroups.find(datalogTerm1.toString()).equals(datalogTerm2.toString()); +// String query = "prefix owl: select where {" +// + datalogTerm1 +// + " owl:sameAs " +// + datalogTerm2 +// + ". 
} "; +// TupleIterator tupleIterator; +// try { +// tupleIterator = store.compileQuery(query, prefixes, parameters); +// } catch (JRDFStoreException e) { +// e.printStackTrace(); +// return false; +// } +// boolean res = false; +// try { +// res = tupleIterator.open() != 0; +// } catch (JRDFStoreException e) { +// e.printStackTrace(); +// return false; +// } finally { +// tupleIterator.dispose(); +// } +// return res; + } + + private GroundTerm toRDFoxIndividual(Term t) { + return uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) t).getIRI()); + } + + private boolean isSyntacticAnonymous(GroundTerm datalog_t) { + if (datalog_t instanceof uk.ac.ox.cs.JRDFox.model.Individual && ((uk.ac.ox.cs.JRDFox.model.Individual) datalog_t).getIRI().startsWith(Namespace.KARMA_ANONY)) + return true; + return false; + } + + class EqualityConstraintRelation { + + private ExtendedConjunctiveQuery cq; + private Map match; + private Map> sim; + + public EqualityConstraintRelation(ExtendedConjunctiveQuery q, + Map m) { + cq = q; + match = m; + sim = new HashMap>(); + } + + public void addSingletonClass(Term t) { + Set eqclass = new HashSet(); + eqclass.add(t); + sim.put(t, eqclass); + } + + public boolean areConstraintToBeEqual(Term s, Term t) + throws ConstraintException { + Term sRepresentative = getRepresentative(s); + Term tRepresentative = getRepresentative(t); + if (sRepresentative == null || tRepresentative == null) { + throw new ConstraintException("Cannot identify terms " + s + + " and " + t); + } + return sRepresentative.equals(tRepresentative); + } + + public void constrainToBeEqual(Term s, Term t) + throws ConstraintException { + Term sRepresentative = getRepresentative(s); + Term tRepresentative = getRepresentative(t); + if (sRepresentative == null || tRepresentative == null) { + throw new ConstraintException("Cannot identify terms " + s + + " and " + t); + } + if (!sRepresentative.equals(tRepresentative)) { + sim.get(sRepresentative).addAll(sim.get(tRepresentative)); + sim.remove(tRepresentative); + } + } + + public Term getRepresentative(Term s) { + if (sim.containsKey(s)) + return s; + for (Term key : sim.keySet()) { + if (sim.get(key).contains(s)) + return key; + } + return null; + } + + public Set getEquivalenceClass(Term s) { + if (sim.containsKey(s)) + return sim.get(s); + for (Set eqClass : sim.values()) { + if (eqClass.contains(s)) + return eqClass; + } + return null; + } + + public void deriveForkConstraints() throws ConstraintException { + boolean newDerivedConstraints = true; + while (newDerivedConstraints) { + newDerivedConstraints = false; + for (Atom a1 : cq.getAtoms()) + for (Atom a2 : cq.getAtoms()) { + if (a1.getArity() == 2 && a2.getArity() == 2) { + GroundTerm term = a1.getArgument(1) instanceof Individual ? 
toRDFoxIndividual(a1.getArgument(1)) : match.get(a1.getArgument(1)); + if (areConstraintToBeEqual(a1.getArgument(1), a2.getArgument(1)) && !areConstraintToBeEqual(a1.getArgument(0),a2.getArgument(0))) { + if (isRealAnonymous(term)) { + constrainToBeEqual(a1.getArgument(0), a2.getArgument(0)); + newDerivedConstraints = true; + } + } + } + } + } + } + + public void computeRelation() throws ConstraintException { + for (Term t : cq.getTerms()) { + addSingletonClass(t); + } + deriveForkConstraints(); + } + + public String toString() { + String res = ""; + for (Set terms : this.sim.values()) { + res += "[ "; + for (Term t : terms) + res += t + " "; + res += "]\n"; + } + return res; + } + + } + + public void dispose() { + store.dispose(); + } + +} \ No newline at end of file diff --git a/src/main/java/org/semanticweb/karma2/clausifier/OntologyProcesser.java b/src/main/java/org/semanticweb/karma2/clausifier/OntologyProcesser.java new file mode 100644 index 0000000..5ff339e --- /dev/null +++ b/src/main/java/org/semanticweb/karma2/clausifier/OntologyProcesser.java @@ -0,0 +1,572 @@ +package org.semanticweb.karma2.clausifier; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintWriter; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; + +import org.semanticweb.HermiT.model.Atom; +import org.semanticweb.HermiT.model.AtomicConcept; +import org.semanticweb.HermiT.model.AtomicRole; +import org.semanticweb.HermiT.model.DLClause; +import org.semanticweb.HermiT.model.Individual; +import org.semanticweb.HermiT.model.Role; +import org.semanticweb.HermiT.model.Term; +import org.semanticweb.HermiT.model.Variable; +import org.semanticweb.HermiT.structural.BuiltInPropertyManager; +import org.semanticweb.HermiT.structural.OWLAxioms; +import org.semanticweb.HermiT.structural.OWLAxiomsExpressivity; +import org.semanticweb.HermiT.structural.OWLNormalization; +import org.semanticweb.HermiT.structural.ObjectPropertyInclusionManager; +import org.semanticweb.karma2.exception.IllegalInputOntologyException; +import org.semanticweb.karma2.model.Equality; +import org.semanticweb.karma2.profile.ELHOProfile; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; +import org.semanticweb.owlapi.model.OWLClassExpression; +import org.semanticweb.owlapi.model.OWLClassExpressionVisitor; +import org.semanticweb.owlapi.model.OWLDataAllValuesFrom; +import org.semanticweb.owlapi.model.OWLDataExactCardinality; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLDataHasValue; +import org.semanticweb.owlapi.model.OWLDataMaxCardinality; +import org.semanticweb.owlapi.model.OWLDataMinCardinality; +import org.semanticweb.owlapi.model.OWLDataProperty; +import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyExpression; +import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom; +import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom; +import org.semanticweb.owlapi.model.OWLIndividual; +import org.semanticweb.owlapi.model.OWLIndividualAxiom; +import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom; +import 
org.semanticweb.owlapi.model.OWLObjectAllValuesFrom; +import org.semanticweb.owlapi.model.OWLObjectComplementOf; +import org.semanticweb.owlapi.model.OWLObjectExactCardinality; +import org.semanticweb.owlapi.model.OWLObjectHasSelf; +import org.semanticweb.owlapi.model.OWLObjectHasValue; +import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; +import org.semanticweb.owlapi.model.OWLObjectInverseOf; +import org.semanticweb.owlapi.model.OWLObjectMaxCardinality; +import org.semanticweb.owlapi.model.OWLObjectMinCardinality; +import org.semanticweb.owlapi.model.OWLObjectOneOf; +import org.semanticweb.owlapi.model.OWLObjectProperty; +import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; +import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; +import org.semanticweb.owlapi.model.OWLObjectUnionOf; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLSameIndividualAxiom; +import org.semanticweb.owlapi.profiles.OWLProfileReport; +import org.semanticweb.owlapi.model.OWLAxiomVisitor; + +import uk.ac.ox.cs.pagoda.util.Utility; + +public class OntologyProcesser { + + + protected static final Variable X=Variable.create("?X"); + protected static final Variable Y=Variable.create("?Y"); + protected static final Variable Z=Variable.create("?Z"); + + + public static void transformOntology(OWLOntology root, File dataFile, File ruleFile) throws IllegalInputOntologyException { + ELHOProfile profile = new ELHOProfile(); + OWLProfileReport report = profile.checkOntology(root); + if (!report.isInProfile()) { + Utility.logError(report.toString()); + throw new IllegalInputOntologyException("the ontology is not ELHO"); + } + OntologyProcesser processer = new OntologyProcesser(); + processer.preprocessAndClausify(root, dataFile, ruleFile); + } + + + private void preprocessAndClausify(OWLOntology rootOntology, File dataFile, File ruleFile) { + OWLDataFactory factory=rootOntology.getOWLOntologyManager().getOWLDataFactory(); + String ontologyIRI=rootOntology.getOntologyID().getDefaultDocumentIRI()==null ? 
"urn:hermit:kb" : rootOntology.getOntologyID().getDefaultDocumentIRI().toString(); + Collection importClosure=rootOntology.getImportsClosure(); + OWLAxioms axioms=new OWLAxioms(); + OWLNormalization normalization=new OWLNormalization(factory,axioms,0); + for (OWLOntology ontology : importClosure) { + normalization.processOntology(ontology); + } + BuiltInPropertyManager builtInPropertyManager=new BuiltInPropertyManager(factory); + builtInPropertyManager.axiomatizeBuiltInPropertiesAsNeeded(axioms); + ObjectPropertyInclusionManager objectPropertyInclusionManager=new ObjectPropertyInclusionManager(axioms); + objectPropertyInclusionManager.rewriteAxioms(factory,axioms,0); + OWLAxiomsExpressivity axiomsExpressivity=new OWLAxiomsExpressivity(axioms); + clausify(factory,ontologyIRI,axioms,axiomsExpressivity, dataFile,ruleFile); + writeTopRules(rootOntology.getClassesInSignature(), rootOntology.getObjectPropertiesInSignature(), ruleFile); + +} + + + private void writeTopRules(Set classes, Set properties, File ruleFile) { + PrintWriter writer = null; + try { + writer = new PrintWriter(new BufferedWriter(new FileWriter(ruleFile, true))); + for (OWLClass cls : classes) { + writer.println("(?X) :- <" + cls.toStringID() + ">(?X)."); + } + + for (OWLObjectProperty prop : properties) { + writer.println("(?X) :- <" + prop.toStringID() + ">(?X,?Y)."); + writer.println("(?Y) :- <" + prop.toStringID() + ">(?X,?Y)."); + } + + }catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } finally{ + writer.close(); + classes.clear(); + properties.clear(); + } + + } + + private void writeDataFile(Set positiveFacts, File dataFile) { + PrintWriter writer = null; + try { + writer = new PrintWriter(dataFile); + for (Atom a: positiveFacts) { + if (a.getArity() == 1) { + writer.println(a.getArgument(0)+ " " + a.getDLPredicate() + " . "); + } + + if (a.getArity() == 2) { + writer.println(a.getArgument(0)+ " " + a.getDLPredicate() + " "+ a.getArgument(1) + " . "); + } + } + }catch (FileNotFoundException e) { + e.printStackTrace(); + } finally{ + writer.close(); + positiveFacts.clear(); + positiveFacts = null; + } + + } + + + private void writeRules(Set clauses, File ruleFile) { + PrintWriter writer = null; + boolean first; + Atom emptyHeadAtom = Atom.create(AtomicConcept.NOTHING, X); + try { + writer = new PrintWriter(ruleFile); + for (DLClause clause : clauses) { + Atom headAtom = clause.getHeadLength() > 0 ? 
clause.getHeadAtom(0) : emptyHeadAtom; + writer.print(headAtom + " :- "); + first = true; + for (Atom bodyAtom : clause.getBodyAtoms()) + if (first) { + writer.print( bodyAtom); + first = false; + } + else + writer.print( ", " + bodyAtom); + + writer.println(" ."); + } + writer.println("(?X,?Z) :- (?X,?Y), (?Y,?Z) ."); + writer.println("(?Y,?X) :- (?X,?Y) ."); + + }catch (FileNotFoundException e) { + e.printStackTrace(); + } finally{ + writer.close(); + clauses.clear(); + clauses = null; + } + + } + + + public void clausify(OWLDataFactory factory,String ontologyIRI,OWLAxioms axioms,OWLAxiomsExpressivity axiomsExpressivity, File dataFile, File ruleFile) { + Set dlClauses=new LinkedHashSet(); + Set positiveFacts=new HashSet(); + for (OWLObjectPropertyExpression[] inclusion : axioms.m_simpleObjectPropertyInclusions) { + Atom subRoleAtom=getRoleAtom(inclusion[0],X,Y); + Atom superRoleAtom=getRoleAtom(inclusion[1],X,Y); + DLClause dlClause=DLClause.create(new Atom[] { superRoleAtom },new Atom[] { subRoleAtom }); + dlClauses.add(dlClause); + } + NormalizedDatalogAxiomClausifier clausifier=new NormalizedDatalogAxiomClausifier(positiveFacts,factory); + for (OWLClassExpression[] inclusion : axioms.m_conceptInclusions) { + for (OWLClassExpression description : inclusion) + description.accept(clausifier); + for(DLClause dlClause :clausifier.getDLClause()) + dlClauses.add(dlClause.getSafeVersion(AtomicConcept.THING)); + } + DatalogFactClausifier factClausifier=new DatalogFactClausifier(positiveFacts); + for (OWLIndividualAxiom fact : axioms.m_facts) + fact.accept(factClausifier); + writeDataFile(positiveFacts, dataFile); + writeRules(dlClauses, ruleFile); + } + + protected static AtomicRole getAtomicRole(OWLDataPropertyExpression dataPropertyExpression) { + return AtomicRole.create(((OWLDataProperty)dataPropertyExpression).getIRI().toString()); + } + protected static Atom getRoleAtom(OWLObjectPropertyExpression objectProperty,Term first,Term second) { + objectProperty=objectProperty.getSimplified(); + if (!objectProperty.isAnonymous()) { + AtomicRole role=AtomicRole.create(objectProperty.asOWLObjectProperty().getIRI().toString()); + return Atom.create(role,first,second); + } + else if (objectProperty.isAnonymous()) { + OWLObjectProperty internalObjectProperty=objectProperty.getNamedProperty(); + AtomicRole role=AtomicRole.create(internalObjectProperty.getIRI().toString()); + return Atom.create(role,second,first); + } + else + throw new IllegalStateException("Internal error: unsupported type of object property!"); + } + + + protected static Role getRole(OWLObjectPropertyExpression objectPropertyExpression) { + objectPropertyExpression=objectPropertyExpression.getSimplified(); + if (objectPropertyExpression instanceof OWLObjectProperty) + return AtomicRole.create(((OWLObjectProperty)objectPropertyExpression).getIRI().toString()); + else if (objectPropertyExpression instanceof OWLObjectInverseOf) { + OWLObjectPropertyExpression internal=((OWLObjectInverseOf)objectPropertyExpression).getInverse(); + if (!(internal instanceof OWLObjectProperty)) + throw new IllegalStateException("Internal error: invalid normal form."); + return AtomicRole.create(((OWLObjectProperty)internal).getIRI().toString()).getInverse(); + } + else + throw new IllegalStateException("Internal error: invalid normal form."); + } + + protected static Atom getRoleAtom(OWLDataPropertyExpression dataProperty,Term first,Term second) { + if (dataProperty instanceof OWLDataProperty) { + AtomicRole 
property=AtomicRole.create(((OWLDataProperty)dataProperty).getIRI().toString()); + return Atom.create(property,first,second); + } + else + throw new IllegalStateException("Internal error: unsupported type of data property!"); + } + protected static Individual getIndividual(OWLIndividual individual) { + if (individual.isAnonymous()) + return Individual.createAnonymous(individual.asOWLAnonymousIndividual().getID().toString()); + else + return Individual.create(individual.asOWLNamedIndividual().getIRI().toString()); + } + + + protected static class NormalizedDatalogAxiomClausifier implements OWLClassExpressionVisitor { + protected final List m_headAtoms; + protected final List m_bodyAtoms; + protected final List m_auxAtoms; + protected final Set m_positiveFacts; + protected final OWLDataFactory m_factory; + protected int m_yIndex; + protected int m_zIndex; + + + public NormalizedDatalogAxiomClausifier(Set positiveFacts,OWLDataFactory factory) { + m_headAtoms=new ArrayList(); + m_bodyAtoms=new ArrayList(); + m_auxAtoms=new ArrayList(); + m_positiveFacts=positiveFacts; + m_factory=factory; + } + + + + protected Set getDLClause() { + + Set clauses = new HashSet(); + Atom[] headAtoms=new Atom[m_headAtoms.size()]; + m_headAtoms.toArray(headAtoms); + Atom[] bodyAtoms=new Atom[m_bodyAtoms.size()]; + m_bodyAtoms.toArray(bodyAtoms); + clauses.add(DLClause.create(headAtoms,bodyAtoms)); + if (!m_auxAtoms.isEmpty()) { + Atom[] auxAtoms=new Atom[m_auxAtoms.size()]; + m_auxAtoms.toArray(auxAtoms); + clauses.add(DLClause.create(auxAtoms,bodyAtoms)); + } + m_headAtoms.clear(); + m_bodyAtoms.clear(); + m_auxAtoms.clear(); + m_yIndex=0; + m_zIndex=0; + return clauses; + } + protected void ensureYNotZero() { + if (m_yIndex==0) + m_yIndex++; + } + protected Variable nextY() { + Variable result; + if (m_yIndex==0) + result=Y; + else + result=Variable.create("?Y"+m_yIndex); + m_yIndex++; + return result; + } + protected Variable nextZ() { + Variable result; + if (m_zIndex==0) + result=Z; + else + result=Variable.create("?Z"+m_zIndex); + m_zIndex++; + return result; + } + + + + + private void existentialRestriction(OWLObjectProperty prop, OWLClassExpression filler) { + if (filler.isAnonymous()) + throw new IllegalStateException("Internal error: invalid normal form."); + String propertyID = prop.asOWLObjectProperty().toStringID(); + String propertyShortID = propertyID.substring(propertyID.indexOf('#')+1); + String classID = filler.asOWLClass().toStringID(); + String classShortID = classID.substring(classID.indexOf('#')+1); + Individual auxInd = Individual.create("http://www.cs.ox.ac.uk/KARMA/anonymous#:"+propertyShortID + "-"+classShortID); + m_headAtoms.add(Atom.create(AtomicRole.create(propertyID), X, auxInd)); + m_auxAtoms.add(Atom.create(AtomicConcept.create(classID), auxInd)); + } + + + // Various types of descriptions + + public void visit(OWLClass object) { + + m_headAtoms.add(Atom.create(AtomicConcept.create(object.getIRI().toString()),X)); + } + + + public void visit(OWLObjectIntersectionOf object) { + throw new IllegalStateException("Internal error: invalid normal form."); + } + public void visit(OWLObjectUnionOf object) { + throw new IllegalStateException("Internal error: invalid normal form."); + } + + + public void visit(OWLObjectComplementOf object) { + OWLClassExpression description=object.getOperand(); + if (description instanceof OWLObjectHasSelf) { + OWLObjectPropertyExpression objectProperty=((OWLObjectHasSelf)description).getProperty(); + Atom roleAtom=getRoleAtom(objectProperty,X,X); + 
m_bodyAtoms.add(roleAtom); + throw new IllegalStateException("Internal error: invalid normal form."); + } + else if (description instanceof OWLObjectOneOf && ((OWLObjectOneOf)description).getIndividuals().size()==1) { + OWLIndividual individual=((OWLObjectOneOf)description).getIndividuals().iterator().next(); + m_bodyAtoms.add(Atom.create(Equality.INSTANCE,X, getIndividual(individual))); + } + else if (!(description instanceof OWLClass)) + throw new IllegalStateException("Internal error: invalid normal form."); + else + m_bodyAtoms.add(Atom.create(AtomicConcept.create(((OWLClass)description).getIRI().toString()),X)); + } + + + + public void visit(OWLObjectOneOf object) { + for (OWLIndividual individual : object.getIndividuals()) { + m_headAtoms.add(Atom.create(Equality.INSTANCE,X,getIndividual(individual))); + } + } + + + + + public void visit(OWLObjectSomeValuesFrom object) { + + OWLClassExpression filler=object.getFiller(); + if (filler instanceof OWLObjectOneOf) { + for (OWLIndividual individual : ((OWLObjectOneOf)filler).getIndividuals()) { + m_headAtoms.add(getRoleAtom(object.getProperty(),X,getIndividual(individual))); + } + } else { + if (filler.isAnonymous()) + throw new IllegalStateException("Internal error: invalid normal form."); + existentialRestriction(object.getProperty().asOWLObjectProperty(), filler); + } + } + + + public void visit(OWLObjectAllValuesFrom object) { + Variable y=nextY(); + m_bodyAtoms.add(getRoleAtom(object.getProperty(),X,y)); + OWLClassExpression filler=object.getFiller(); + + if (filler instanceof OWLClass) { + AtomicConcept atomicConcept=AtomicConcept.create(((OWLClass)filler).getIRI().toString()); + if (!atomicConcept.isAlwaysFalse()) + m_headAtoms.add(Atom.create(atomicConcept,y)); + } + else if (filler instanceof OWLObjectOneOf) { + for (OWLIndividual individual : ((OWLObjectOneOf)filler).getIndividuals()) { + m_headAtoms.add(Atom.create(Equality.INSTANCE,y,getIndividual(individual))); + } + } + else if (filler instanceof OWLObjectComplementOf) { + OWLClassExpression operand=((OWLObjectComplementOf)filler).getOperand(); + if (operand instanceof OWLClass) { + AtomicConcept internalAtomicConcept=AtomicConcept.create(((OWLClass)operand).getIRI().toString()); + if (!internalAtomicConcept.isAlwaysTrue()) + m_bodyAtoms.add(Atom.create(internalAtomicConcept,y)); + } + else if (operand instanceof OWLObjectOneOf && ((OWLObjectOneOf)operand).getIndividuals().size()==1) { + OWLIndividual individual=((OWLObjectOneOf)operand).getIndividuals().iterator().next(); + m_bodyAtoms.add(Atom.create(Equality.INSTANCE,y,getIndividual(individual))); + } + else + throw new IllegalStateException("Internal error: invalid normal form."); + } + else + throw new IllegalStateException("Internal error: invalid normal form."); + } + public void visit(OWLObjectHasValue object) { + throw new IllegalStateException("Internal error: invalid normal form."); + } + public void visit(OWLObjectHasSelf object) { + throw new IllegalStateException("Internal error: invalid normal form."); + } + + public void visit(OWLObjectMinCardinality object) { + if (object.getCardinality() != 1) + throw new IllegalStateException("Internal error: invalid normal form."); + existentialRestriction(object.getProperty().asOWLObjectProperty(), object.getFiller()); + } + public void visit(OWLObjectMaxCardinality object) { + throw new IllegalStateException("Internal error: invalid normal form."); +// int cardinality=object.getCardinality(); +// OWLObjectPropertyExpression onObjectProperty=object.getProperty(); +// 
OWLClassExpression filler=object.getFiller(); +// ensureYNotZero(); +// boolean isPositive; +// AtomicConcept atomicConcept; +// if (filler instanceof OWLClass) { +// isPositive=true; +// atomicConcept=AtomicConcept.create(((OWLClass)filler).getIRI().toString()); +// if (atomicConcept.isAlwaysTrue()) +// atomicConcept=null; +// } +// else if (filler instanceof OWLObjectComplementOf) { +// OWLClassExpression internal=((OWLObjectComplementOf)filler).getOperand(); +// if (!(internal instanceof OWLClass)) +// throw new IllegalStateException("Internal error: Invalid ontology normal form."); +// isPositive=false; +// atomicConcept=AtomicConcept.create(((OWLClass)internal).getIRI().toString()); +// if (atomicConcept.isAlwaysFalse()) +// atomicConcept=null; +// } +// else +// throw new IllegalStateException("Internal error: Invalid ontology normal form."); +// Role onRole=getRole(onObjectProperty); +// LiteralConcept toConcept=getLiteralConcept(filler); +// AnnotatedEquality annotatedEquality=AnnotatedEquality.create(cardinality,onRole,toConcept); +// Variable[] yVars=new Variable[cardinality+1]; +// for (int i=0;i2) { +// for (int i=0;i m_positiveFacts; + + public DatalogFactClausifier(Set positiveFacts) { + m_positiveFacts=positiveFacts; + } + public void visit(OWLSameIndividualAxiom object) { + OWLIndividual[] individuals=new OWLIndividual[object.getIndividuals().size()]; + object.getIndividuals().toArray(individuals); + for (int i=0;i iriMap = prefixes.getPrefixIRIsByPrefixName(); + for (String shortIri: iriMap.keySet()) + res += "prefix " + shortIri + " <" + iriMap.get(shortIri) + ">\n"; + res += "prefix rdf: \n"; + res += "select "; + for (Term t: m_answerTerms) + res+= " " + t + " "; + res += " WHERE { "; + for (Atom a : m_queryAtoms){ + if (a.getArity() == 1) + res+= a.getArgument(0) + " rdf:type " + a.getDLPredicate().toString().replace('<', ' ').replace('>', ' ') + " . "; + if (a.getArity() == 2) + res+= a.getArgument(0) + " " + a.getDLPredicate().toString().replace('<', ' ').replace('>', ' ') + " " + a.getArgument(1) + " . "; + } + return res + "}"; + + } + + + public static ConjunctiveQuery parse(String query) throws FileNotFoundException, IllegalInputQueryException, IOException { + return (new ConjunctiveQueryParser(query)).parse(); + } +} \ No newline at end of file diff --git a/src/main/java/org/semanticweb/karma2/model/Equality.java b/src/main/java/org/semanticweb/karma2/model/Equality.java new file mode 100644 index 0000000..fcb270c --- /dev/null +++ b/src/main/java/org/semanticweb/karma2/model/Equality.java @@ -0,0 +1,36 @@ +package org.semanticweb.karma2.model; + +import java.io.Serializable; + +import org.semanticweb.HermiT.Prefixes; +import org.semanticweb.HermiT.model.DLPredicate; + +/** + * Represents the equality predicate. 
+ */ +public class Equality implements DLPredicate,Serializable { + private static final long serialVersionUID=8308051741088513244L; + + public static final Equality INSTANCE=new Equality(); + + protected Equality () { + } + public int getArity() { + return 2; + } + public String toString(Prefixes prefixes) { + return ""; + } + public String toOrderedString(Prefixes prefixes) { + return toString(prefixes); + } + public String toString() { + return toString(Prefixes.STANDARD_PREFIXES); + } + protected Object readResolve() { + return INSTANCE; + } + public static Equality create() { + return INSTANCE; + } +} diff --git a/src/main/java/org/semanticweb/karma2/model/ExtendedConjunctiveQuery.java b/src/main/java/org/semanticweb/karma2/model/ExtendedConjunctiveQuery.java new file mode 100644 index 0000000..de2f87e --- /dev/null +++ b/src/main/java/org/semanticweb/karma2/model/ExtendedConjunctiveQuery.java @@ -0,0 +1,94 @@ +package org.semanticweb.karma2.model; + +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import org.semanticweb.HermiT.model.Atom; +import org.semanticweb.HermiT.model.Term; +import org.semanticweb.HermiT.model.Variable; + +import uk.ac.ox.cs.JRDFox.Prefixes; + + +public class ExtendedConjunctiveQuery extends ConjunctiveQuery { + + private Term[] ansTerms; + private Term[] terms; + + public ExtendedConjunctiveQuery( + Atom[] queryAtoms, Term[] answerTerms, Prefixes pref) { + super(queryAtoms, getExtendedHead(queryAtoms, answerTerms), pref); + this.ansTerms = answerTerms.clone(); + terms = getQueryTerms(queryAtoms); + } + + public int getNumberOfRealAnswerTerms() { + return ansTerms.length; + } + + public Term getRealAnswerTerm(int termIndex) { + return ansTerms[termIndex]; + } + + public int getNumberOfTerms() { + return terms.length; + } + + public Term[] getTerms() { + return terms; + } + + + public Atom[] getAtoms() { + return m_queryAtoms; + } + + public Term[] getRealAnswerTerms() { + return ansTerms; + } + + private static Term[] getExtendedHead(Atom[] queryAtoms, Term[] answerTerms) { + List terms = new LinkedList(); + for (Term t :answerTerms) { + terms.add(t); + } + for (Atom a : queryAtoms) { + if (a.getArgument(0) instanceof Variable && !terms.contains(a.getArgument(0))) + terms.add(a.getArgument(0)); + if (a.getArity()> 1 && a.getArgument(1) instanceof Variable && !terms.contains(a.getArgument(1))) + terms.add(a.getArgument(1)); + } + return terms.toArray(new Term[terms.size()]); + + } + + private static Term[] getQueryTerms(Atom[] queryAtoms) { + Set terms = new LinkedHashSet(); + for (Atom a : queryAtoms) { + terms.add(a.getArgument(0)); + if (a.getArity()> 1) + terms.add(a.getArgument(1)); + } + return terms.toArray(new Term[terms.size()]); + } + + + public static ExtendedConjunctiveQuery computeExtension(ConjunctiveQuery q) { + Term[] answerTerms = new Term[q.getNumberOfAnswerTerms()]; + for (int i = 0; i < q.getNumberOfAnswerTerms(); i++) + answerTerms[i] = q.getAnswerTerm(i); + Atom[] atoms = new Atom[q.getNumberOfQueryAtoms()]; + for (int i = 0; i < q.getNumberOfQueryAtoms(); i++) + atoms[i] = q.getQueryAtom(i); + return new ExtendedConjunctiveQuery(atoms, answerTerms,q.prefixes); + + } + + public Term getTerm(int i) { + return terms[i]; + } + + +} diff --git a/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g b/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g new file mode 100644 index 0000000..621b0ce --- /dev/null +++ 
b/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g @@ -0,0 +1,140 @@ +grammar ConjunctiveQuery; + +options { + language = Java; + output = AST; +} + +tokens { + VARIABLE; + CONSTANT; + SCONSTANT; + ATOM; + HEADATOM; + PREDICATE; + ATOM_LIST; + TERM_LIST; + RULE; + EXPRESSION; + PREFIX_LIST; + ID; + PREFIX; + PREDICATE; +} + +@header { +package org.semanticweb.karma2.model.cqparser; + +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.Set; + +import org.semanticweb.karma2.model.ConjunctiveQuery; + + +import org.semanticweb.karma2.model.cqparser.ConjunctiveQueryWalker; +import org.semanticweb.karma2.exception.IllegalInputQueryException; + + + +} + +@members{ + + + + public ConjunctiveQueryParser(String string) + throws FileNotFoundException, IOException { + this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRStringStream(string)))); + } + + public ConjunctiveQueryParser(InputStream istream) throws FileNotFoundException, IOException { + this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRInputStream(istream)))); + + } + + + public ConjunctiveQueryParser(File file) throws FileNotFoundException, IOException { + this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRInputStream(new FileInputStream(file))))); + + } + + public ConjunctiveQuery parse() throws IllegalInputQueryException { + cq_return r = null; + try { + r = cq(); + } catch (RecognitionException e) { + e.printStackTrace(); + } + CommonTree t = (CommonTree) r.getTree(); + + //System.out.println(t.toStringTree()); + CommonTreeNodeStream nodes = new CommonTreeNodeStream(t); + // AST nodes have payloads that point into token stream + nodes.setTokenStream(input); + + + ConjunctiveQueryWalker walker = new ConjunctiveQueryWalker(); + + ConjunctiveQuery cq = walker.walkExpressionNode(t); + return cq; + } + + public ConjunctiveQuery parseCQ() throws IllegalInputQueryException { + return parse(); + } + +} + + +@lexer::header{ +package org.semanticweb.karma2.model.cqparser; +} + +cq : + prefixlist rulebody -> ^(EXPRESSION prefixlist rulebody ); + +prefixlist: + prefix (',' prefix)* -> ^(PREFIX_LIST prefix*); + +prefix: + 'prefix' id ':' '<' url '>' -> ^(PREFIX id url); + + +rulebody: + headatom ('<-'|':') body '.'? 
-> ^(RULE headatom body); + +body: + atom (',' atom)* -> ^(ATOM_LIST atom*); + + +headatom: + id '(' term (',' term)* ')' -> ^(HEADATOM term*); + +atom: + compositeid '(' term (',' term)* ')' -> ^(ATOM compositeid term*); + +compositeid: + (id) ':' (id) -> ^(ID id id); + + +term: + variable -> ^(VARIABLE variable) + | simpleid -> ^(SCONSTANT simpleid) + | compositeid -> ^(CONSTANT compositeid); + +id : (STRING); +simpleid : '<' URLSTRING '>' | '<' STRING '>'; + +// TODO: FIXIT X1 can be parsed as variable +variable: + ('?') id -> ^(id); + + url : (URLSTRING); + +URLSTRING : ('http://'|'file:/') ('a'..'z'|'A'..'Z'|'0'..'9'|'/'|'#'|'.'|'-'|'~'|'_')+; +STRING : ('a'..'z'|'A'..'Z'|'0'..'9'|'/'|'#'|'.'|'-'|'_')+; +WS : (' '|'\n'|'\r')+ {$channel=HIDDEN;} ; diff --git a/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.tokens b/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.tokens new file mode 100644 index 0000000..3b4fa39 --- /dev/null +++ b/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.tokens @@ -0,0 +1,36 @@ +T__20=20 +T__21=21 +T__22=22 +T__23=23 +T__24=24 +T__25=25 +T__26=26 +T__27=27 +T__28=28 +T__29=29 +ATOM=4 +ATOM_LIST=5 +CONSTANT=6 +EXPRESSION=7 +HEADATOM=8 +ID=9 +PREDICATE=10 +PREFIX=11 +PREFIX_LIST=12 +RULE=13 +SCONSTANT=14 +STRING=15 +TERM_LIST=16 +URLSTRING=17 +VARIABLE=18 +WS=19 +'('=20 +')'=21 +','=22 +'.'=23 +':'=24 +'<'=25 +'<-'=26 +'>'=27 +'?'=28 +'prefix'=29 diff --git a/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryLexer.java b/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryLexer.java new file mode 100644 index 0000000..a97d7a4 --- /dev/null +++ b/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryLexer.java @@ -0,0 +1,814 @@ +// $ANTLR 3.5 /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g 2013-12-13 14:41:34 + +package org.semanticweb.karma2.model.cqparser; + + +import org.antlr.runtime.*; +import java.util.Stack; +import java.util.List; +import java.util.ArrayList; + +@SuppressWarnings("all") +public class ConjunctiveQueryLexer extends Lexer { + public static final int EOF=-1; + public static final int T__20=20; + public static final int T__21=21; + public static final int T__22=22; + public static final int T__23=23; + public static final int T__24=24; + public static final int T__25=25; + public static final int T__26=26; + public static final int T__27=27; + public static final int T__28=28; + public static final int T__29=29; + public static final int ATOM=4; + public static final int ATOM_LIST=5; + public static final int CONSTANT=6; + public static final int EXPRESSION=7; + public static final int HEADATOM=8; + public static final int ID=9; + public static final int PREDICATE=10; + public static final int PREFIX=11; + public static final int PREFIX_LIST=12; + public static final int RULE=13; + public static final int SCONSTANT=14; + public static final int STRING=15; + public static final int TERM_LIST=16; + public static final int URLSTRING=17; + public static final int VARIABLE=18; + public static final int WS=19; + + // delegates + // delegators + public Lexer[] getDelegates() { + return new Lexer[] {}; + } + + public ConjunctiveQueryLexer() {} + public ConjunctiveQueryLexer(CharStream input) { + this(input, new RecognizerSharedState()); + } + public ConjunctiveQueryLexer(CharStream input, RecognizerSharedState state) { + super(input,state); + } + @Override public String getGrammarFileName() { return 
"/home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g"; } + + // $ANTLR start "T__20" + public final void mT__20() throws RecognitionException { + try { + int _type = T__20; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:11:7: ( '(' ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:11:9: '(' + { + match('('); + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "T__20" + + // $ANTLR start "T__21" + public final void mT__21() throws RecognitionException { + try { + int _type = T__21; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:12:7: ( ')' ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:12:9: ')' + { + match(')'); + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "T__21" + + // $ANTLR start "T__22" + public final void mT__22() throws RecognitionException { + try { + int _type = T__22; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:13:7: ( ',' ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:13:9: ',' + { + match(','); + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "T__22" + + // $ANTLR start "T__23" + public final void mT__23() throws RecognitionException { + try { + int _type = T__23; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:14:7: ( '.' ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:14:9: '.' 
+ { + match('.'); + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "T__23" + + // $ANTLR start "T__24" + public final void mT__24() throws RecognitionException { + try { + int _type = T__24; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:15:7: ( ':' ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:15:9: ':' + { + match(':'); + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "T__24" + + // $ANTLR start "T__25" + public final void mT__25() throws RecognitionException { + try { + int _type = T__25; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:16:7: ( '<' ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:16:9: '<' + { + match('<'); + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "T__25" + + // $ANTLR start "T__26" + public final void mT__26() throws RecognitionException { + try { + int _type = T__26; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:17:7: ( '<-' ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:17:9: '<-' + { + match("<-"); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "T__26" + + // $ANTLR start "T__27" + public final void mT__27() throws RecognitionException { + try { + int _type = T__27; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:18:7: ( '>' ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:18:9: '>' + { + match('>'); + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "T__27" + + // $ANTLR start "T__28" + public final void mT__28() throws RecognitionException { + try { + int _type = T__28; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:19:7: ( '?' ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:19:9: '?' 
+ { + match('?'); + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "T__28" + + // $ANTLR start "T__29" + public final void mT__29() throws RecognitionException { + try { + int _type = T__29; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:20:7: ( 'prefix' ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:20:9: 'prefix' + { + match("prefix"); + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "T__29" + + // $ANTLR start "URLSTRING" + public final void mURLSTRING() throws RecognitionException { + try { + int _type = URLSTRING; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:11: ( ( 'http://' | 'file:/' ) ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' | '-' | '~' | '_' )+ ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:13: ( 'http://' | 'file:/' ) ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' | '-' | '~' | '_' )+ + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:13: ( 'http://' | 'file:/' ) + int alt1=2; + int LA1_0 = input.LA(1); + if ( (LA1_0=='h') ) { + alt1=1; + } + else if ( (LA1_0=='f') ) { + alt1=2; + } + + else { + NoViableAltException nvae = + new NoViableAltException("", 1, 0, input); + throw nvae; + } + + switch (alt1) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:14: 'http://' + { + match("http://"); + + } + break; + case 2 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:24: 'file:/' + { + match("file:/"); + + } + break; + + } + + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:138:34: ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' | '-' | '~' | '_' )+ + int cnt2=0; + loop2: + while (true) { + int alt2=2; + int LA2_0 = input.LA(1); + if ( (LA2_0=='#'||(LA2_0 >= '-' && LA2_0 <= '9')||(LA2_0 >= 'A' && LA2_0 <= 'Z')||LA2_0=='_'||(LA2_0 >= 'a' && LA2_0 <= 'z')||LA2_0=='~') ) { + alt2=1; + } + + switch (alt2) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g: + { + if ( input.LA(1)=='#'||(input.LA(1) >= '-' && input.LA(1) <= '9')||(input.LA(1) >= 'A' && input.LA(1) <= 'Z')||input.LA(1)=='_'||(input.LA(1) >= 'a' && input.LA(1) <= 'z')||input.LA(1)=='~' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + } + break; + + default : + if ( cnt2 >= 1 ) break loop2; + EarlyExitException eee = new EarlyExitException(2, input); + throw eee; + } + cnt2++; + } + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "URLSTRING" + + // $ANTLR start "STRING" + public final void mSTRING() throws RecognitionException { + try { + int _type = STRING; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:139:9: ( ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' 
| '-' | '_' )+ ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:139:13: ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' | '-' | '_' )+ + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:139:13: ( 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '/' | '#' | '.' | '-' | '_' )+ + int cnt3=0; + loop3: + while (true) { + int alt3=2; + int LA3_0 = input.LA(1); + if ( (LA3_0=='#'||(LA3_0 >= '-' && LA3_0 <= '9')||(LA3_0 >= 'A' && LA3_0 <= 'Z')||LA3_0=='_'||(LA3_0 >= 'a' && LA3_0 <= 'z')) ) { + alt3=1; + } + + switch (alt3) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g: + { + if ( input.LA(1)=='#'||(input.LA(1) >= '-' && input.LA(1) <= '9')||(input.LA(1) >= 'A' && input.LA(1) <= 'Z')||input.LA(1)=='_'||(input.LA(1) >= 'a' && input.LA(1) <= 'z') ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + } + break; + + default : + if ( cnt3 >= 1 ) break loop3; + EarlyExitException eee = new EarlyExitException(3, input); + throw eee; + } + cnt3++; + } + + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "STRING" + + // $ANTLR start "WS" + public final void mWS() throws RecognitionException { + try { + int _type = WS; + int _channel = DEFAULT_TOKEN_CHANNEL; + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:140:5: ( ( ' ' | '\\n' | '\\r' )+ ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:140:7: ( ' ' | '\\n' | '\\r' )+ + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:140:7: ( ' ' | '\\n' | '\\r' )+ + int cnt4=0; + loop4: + while (true) { + int alt4=2; + int LA4_0 = input.LA(1); + if ( (LA4_0=='\n'||LA4_0=='\r'||LA4_0==' ') ) { + alt4=1; + } + + switch (alt4) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g: + { + if ( input.LA(1)=='\n'||input.LA(1)=='\r'||input.LA(1)==' ' ) { + input.consume(); + } + else { + MismatchedSetException mse = new MismatchedSetException(null,input); + recover(mse); + throw mse; + } + } + break; + + default : + if ( cnt4 >= 1 ) break loop4; + EarlyExitException eee = new EarlyExitException(4, input); + throw eee; + } + cnt4++; + } + + _channel=HIDDEN; + } + + state.type = _type; + state.channel = _channel; + } + finally { + // do for sure before leaving + } + } + // $ANTLR end "WS" + + @Override + public void mTokens() throws RecognitionException { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:8: ( T__20 | T__21 | T__22 | T__23 | T__24 | T__25 | T__26 | T__27 | T__28 | T__29 | URLSTRING | STRING | WS ) + int alt5=13; + switch ( input.LA(1) ) { + case '(': + { + alt5=1; + } + break; + case ')': + { + alt5=2; + } + break; + case ',': + { + alt5=3; + } + break; + case '.': + { + int LA5_4 = input.LA(2); + if ( (LA5_4=='#'||(LA5_4 >= '-' && LA5_4 <= '9')||(LA5_4 >= 'A' && LA5_4 <= 'Z')||LA5_4=='_'||(LA5_4 >= 'a' && LA5_4 <= 'z')) ) { + alt5=12; + } + + else { + alt5=4; + } + + } + break; + case ':': + { + alt5=5; + } + break; + case '<': + { + int LA5_6 = input.LA(2); + if ( (LA5_6=='-') ) { + alt5=7; + } + + else { + alt5=6; + } + + } + break; + case '>': + { + alt5=8; + } + break; + case '?': + { + alt5=9; + } + break; + case 
'p': + { + int LA5_9 = input.LA(2); + if ( (LA5_9=='r') ) { + int LA5_17 = input.LA(3); + if ( (LA5_17=='e') ) { + int LA5_20 = input.LA(4); + if ( (LA5_20=='f') ) { + int LA5_23 = input.LA(5); + if ( (LA5_23=='i') ) { + int LA5_26 = input.LA(6); + if ( (LA5_26=='x') ) { + int LA5_28 = input.LA(7); + if ( (LA5_28=='#'||(LA5_28 >= '-' && LA5_28 <= '9')||(LA5_28 >= 'A' && LA5_28 <= 'Z')||LA5_28=='_'||(LA5_28 >= 'a' && LA5_28 <= 'z')) ) { + alt5=12; + } + + else { + alt5=10; + } + + } + + else { + alt5=12; + } + + } + + else { + alt5=12; + } + + } + + else { + alt5=12; + } + + } + + else { + alt5=12; + } + + } + + else { + alt5=12; + } + + } + break; + case 'h': + { + int LA5_10 = input.LA(2); + if ( (LA5_10=='t') ) { + int LA5_18 = input.LA(3); + if ( (LA5_18=='t') ) { + int LA5_21 = input.LA(4); + if ( (LA5_21=='p') ) { + int LA5_24 = input.LA(5); + if ( (LA5_24==':') ) { + alt5=11; + } + + else { + alt5=12; + } + + } + + else { + alt5=12; + } + + } + + else { + alt5=12; + } + + } + + else { + alt5=12; + } + + } + break; + case 'f': + { + int LA5_11 = input.LA(2); + if ( (LA5_11=='i') ) { + int LA5_19 = input.LA(3); + if ( (LA5_19=='l') ) { + int LA5_22 = input.LA(4); + if ( (LA5_22=='e') ) { + int LA5_25 = input.LA(5); + if ( (LA5_25==':') ) { + alt5=11; + } + + else { + alt5=12; + } + + } + + else { + alt5=12; + } + + } + + else { + alt5=12; + } + + } + + else { + alt5=12; + } + + } + break; + case '#': + case '-': + case '/': + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case 'A': + case 'B': + case 'C': + case 'D': + case 'E': + case 'F': + case 'G': + case 'H': + case 'I': + case 'J': + case 'K': + case 'L': + case 'M': + case 'N': + case 'O': + case 'P': + case 'Q': + case 'R': + case 'S': + case 'T': + case 'U': + case 'V': + case 'W': + case 'X': + case 'Y': + case 'Z': + case '_': + case 'a': + case 'b': + case 'c': + case 'd': + case 'e': + case 'g': + case 'i': + case 'j': + case 'k': + case 'l': + case 'm': + case 'n': + case 'o': + case 'q': + case 'r': + case 's': + case 't': + case 'u': + case 'v': + case 'w': + case 'x': + case 'y': + case 'z': + { + alt5=12; + } + break; + case '\n': + case '\r': + case ' ': + { + alt5=13; + } + break; + default: + NoViableAltException nvae = + new NoViableAltException("", 5, 0, input); + throw nvae; + } + switch (alt5) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:10: T__20 + { + mT__20(); + + } + break; + case 2 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:16: T__21 + { + mT__21(); + + } + break; + case 3 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:22: T__22 + { + mT__22(); + + } + break; + case 4 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:28: T__23 + { + mT__23(); + + } + break; + case 5 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:34: T__24 + { + mT__24(); + + } + break; + case 6 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:40: T__25 + { + mT__25(); + + } + break; + case 7 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:46: T__26 + { + mT__26(); + + } + break; + case 8 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:52: T__27 + { + 
mT__27(); + + } + break; + case 9 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:58: T__28 + { + mT__28(); + + } + break; + case 10 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:64: T__29 + { + mT__29(); + + } + break; + case 11 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:70: URLSTRING + { + mURLSTRING(); + + } + break; + case 12 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:80: STRING + { + mSTRING(); + + } + break; + case 13 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:1:87: WS + { + mWS(); + + } + break; + + } + } + + + +} diff --git a/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryParser.java b/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryParser.java new file mode 100644 index 0000000..b934e30 --- /dev/null +++ b/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryParser.java @@ -0,0 +1,1611 @@ +// $ANTLR 3.5 /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g 2013-12-13 14:41:34 + +package org.semanticweb.karma2.model.cqparser; + +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.Set; + +import org.semanticweb.karma2.model.ConjunctiveQuery; + + +import org.semanticweb.karma2.model.cqparser.ConjunctiveQueryWalker; +import org.semanticweb.karma2.exception.IllegalInputQueryException; + + + + + +import org.antlr.runtime.*; +import java.util.Stack; +import java.util.List; +import java.util.ArrayList; + +import org.antlr.runtime.tree.*; + + +@SuppressWarnings("all") +public class ConjunctiveQueryParser extends Parser { + public static final String[] tokenNames = new String[] { + "", "", "", "", "ATOM", "ATOM_LIST", "CONSTANT", + "EXPRESSION", "HEADATOM", "ID", "PREDICATE", "PREFIX", "PREFIX_LIST", + "RULE", "SCONSTANT", "STRING", "TERM_LIST", "URLSTRING", "VARIABLE", "WS", + "'('", "')'", "','", "'.'", "':'", "'<'", "'<-'", "'>'", "'?'", "'prefix'" + }; + public static final int EOF=-1; + public static final int T__20=20; + public static final int T__21=21; + public static final int T__22=22; + public static final int T__23=23; + public static final int T__24=24; + public static final int T__25=25; + public static final int T__26=26; + public static final int T__27=27; + public static final int T__28=28; + public static final int T__29=29; + public static final int ATOM=4; + public static final int ATOM_LIST=5; + public static final int CONSTANT=6; + public static final int EXPRESSION=7; + public static final int HEADATOM=8; + public static final int ID=9; + public static final int PREDICATE=10; + public static final int PREFIX=11; + public static final int PREFIX_LIST=12; + public static final int RULE=13; + public static final int SCONSTANT=14; + public static final int STRING=15; + public static final int TERM_LIST=16; + public static final int URLSTRING=17; + public static final int VARIABLE=18; + public static final int WS=19; + + // delegates + public Parser[] getDelegates() { + return new Parser[] {}; + } + + // delegators + + + public ConjunctiveQueryParser(TokenStream input) { + this(input, new RecognizerSharedState()); + } + public ConjunctiveQueryParser(TokenStream input, RecognizerSharedState state) { + 
super(input, state); + } + + protected TreeAdaptor adaptor = new CommonTreeAdaptor(); + + public void setTreeAdaptor(TreeAdaptor adaptor) { + this.adaptor = adaptor; + } + public TreeAdaptor getTreeAdaptor() { + return adaptor; + } + @Override public String[] getTokenNames() { return ConjunctiveQueryParser.tokenNames; } + @Override public String getGrammarFileName() { return "/home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g"; } + + + + + + public ConjunctiveQueryParser(String string) + throws FileNotFoundException, IOException { + this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRStringStream(string)))); + } + + public ConjunctiveQueryParser(InputStream istream) throws FileNotFoundException, IOException { + this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRInputStream(istream)))); + + } + + + public ConjunctiveQueryParser(File file) throws FileNotFoundException, IOException { + this(new CommonTokenStream(new ConjunctiveQueryLexer(new ANTLRInputStream(new FileInputStream(file))))); + + } + + public ConjunctiveQuery parse() throws IllegalInputQueryException { + cq_return r = null; + try { + r = cq(); + } catch (RecognitionException e) { + e.printStackTrace(); + } + CommonTree t = (CommonTree) r.getTree(); + + CommonTreeNodeStream nodes = new CommonTreeNodeStream(t); + // AST nodes have payloads that point into token stream + nodes.setTokenStream(input); + + + ConjunctiveQueryWalker walker = new ConjunctiveQueryWalker(); + + ConjunctiveQuery cq = walker.walkExpressionNode(t); + return cq; + } + + public ConjunctiveQuery parseCQ() throws IllegalInputQueryException { + return parse(); + } + + + + public static class cq_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "cq" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:97:1: cq : prefixlist rulebody -> ^( EXPRESSION prefixlist rulebody ) ; + public final ConjunctiveQueryParser.cq_return cq() throws RecognitionException { + ConjunctiveQueryParser.cq_return retval = new ConjunctiveQueryParser.cq_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + ParserRuleReturnScope prefixlist1 =null; + ParserRuleReturnScope rulebody2 =null; + + RewriteRuleSubtreeStream stream_rulebody=new RewriteRuleSubtreeStream(adaptor,"rule rulebody"); + RewriteRuleSubtreeStream stream_prefixlist=new RewriteRuleSubtreeStream(adaptor,"rule prefixlist"); + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:97:4: ( prefixlist rulebody -> ^( EXPRESSION prefixlist rulebody ) ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:98:3: prefixlist rulebody + { + pushFollow(FOLLOW_prefixlist_in_cq132); + prefixlist1=prefixlist(); + state._fsp--; + + stream_prefixlist.add(prefixlist1.getTree()); + pushFollow(FOLLOW_rulebody_in_cq134); + rulebody2=rulebody(); + state._fsp--; + + stream_rulebody.add(rulebody2.getTree()); + // AST REWRITE + // elements: rulebody, prefixlist + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 98:23: -> ^( EXPRESSION prefixlist rulebody ) + { + // 
/home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:98:26: ^( EXPRESSION prefixlist rulebody ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(EXPRESSION, "EXPRESSION"), root_1); + adaptor.addChild(root_1, stream_prefixlist.nextTree()); + adaptor.addChild(root_1, stream_rulebody.nextTree()); + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "cq" + + + public static class prefixlist_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "prefixlist" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:100:1: prefixlist : prefix ( ',' prefix )* -> ^( PREFIX_LIST ( prefix )* ) ; + public final ConjunctiveQueryParser.prefixlist_return prefixlist() throws RecognitionException { + ConjunctiveQueryParser.prefixlist_return retval = new ConjunctiveQueryParser.prefixlist_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + Token char_literal4=null; + ParserRuleReturnScope prefix3 =null; + ParserRuleReturnScope prefix5 =null; + + Object char_literal4_tree=null; + RewriteRuleTokenStream stream_22=new RewriteRuleTokenStream(adaptor,"token 22"); + RewriteRuleSubtreeStream stream_prefix=new RewriteRuleSubtreeStream(adaptor,"rule prefix"); + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:100:11: ( prefix ( ',' prefix )* -> ^( PREFIX_LIST ( prefix )* ) ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:101:3: prefix ( ',' prefix )* + { + pushFollow(FOLLOW_prefix_in_prefixlist154); + prefix3=prefix(); + state._fsp--; + + stream_prefix.add(prefix3.getTree()); + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:101:10: ( ',' prefix )* + loop1: + while (true) { + int alt1=2; + int LA1_0 = input.LA(1); + if ( (LA1_0==22) ) { + alt1=1; + } + + switch (alt1) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:101:11: ',' prefix + { + char_literal4=(Token)match(input,22,FOLLOW_22_in_prefixlist157); + stream_22.add(char_literal4); + + pushFollow(FOLLOW_prefix_in_prefixlist159); + prefix5=prefix(); + state._fsp--; + + stream_prefix.add(prefix5.getTree()); + } + break; + + default : + break loop1; + } + } + + // AST REWRITE + // elements: prefix + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 101:24: -> ^( PREFIX_LIST ( prefix )* ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:101:27: ^( PREFIX_LIST ( prefix )* ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(PREFIX_LIST, "PREFIX_LIST"), 
root_1); + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:101:41: ( prefix )* + while ( stream_prefix.hasNext() ) { + adaptor.addChild(root_1, stream_prefix.nextTree()); + } + stream_prefix.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "prefixlist" + + + public static class prefix_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "prefix" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:103:1: prefix : 'prefix' id ':' '<' url '>' -> ^( PREFIX id url ) ; + public final ConjunctiveQueryParser.prefix_return prefix() throws RecognitionException { + ConjunctiveQueryParser.prefix_return retval = new ConjunctiveQueryParser.prefix_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + Token string_literal6=null; + Token char_literal8=null; + Token char_literal9=null; + Token char_literal11=null; + ParserRuleReturnScope id7 =null; + ParserRuleReturnScope url10 =null; + + Object string_literal6_tree=null; + Object char_literal8_tree=null; + Object char_literal9_tree=null; + Object char_literal11_tree=null; + RewriteRuleTokenStream stream_24=new RewriteRuleTokenStream(adaptor,"token 24"); + RewriteRuleTokenStream stream_25=new RewriteRuleTokenStream(adaptor,"token 25"); + RewriteRuleTokenStream stream_27=new RewriteRuleTokenStream(adaptor,"token 27"); + RewriteRuleTokenStream stream_29=new RewriteRuleTokenStream(adaptor,"token 29"); + RewriteRuleSubtreeStream stream_id=new RewriteRuleSubtreeStream(adaptor,"rule id"); + RewriteRuleSubtreeStream stream_url=new RewriteRuleSubtreeStream(adaptor,"rule url"); + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:103:7: ( 'prefix' id ':' '<' url '>' -> ^( PREFIX id url ) ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:104:3: 'prefix' id ':' '<' url '>' + { + string_literal6=(Token)match(input,29,FOLLOW_29_in_prefix181); + stream_29.add(string_literal6); + + pushFollow(FOLLOW_id_in_prefix183); + id7=id(); + state._fsp--; + + stream_id.add(id7.getTree()); + char_literal8=(Token)match(input,24,FOLLOW_24_in_prefix185); + stream_24.add(char_literal8); + + char_literal9=(Token)match(input,25,FOLLOW_25_in_prefix187); + stream_25.add(char_literal9); + + pushFollow(FOLLOW_url_in_prefix189); + url10=url(); + state._fsp--; + + stream_url.add(url10.getTree()); + char_literal11=(Token)match(input,27,FOLLOW_27_in_prefix191); + stream_27.add(char_literal11); + + // AST REWRITE + // elements: id, url + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 104:31: -> ^( PREFIX id url ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:104:34: 
^( PREFIX id url ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(PREFIX, "PREFIX"), root_1); + adaptor.addChild(root_1, stream_id.nextTree()); + adaptor.addChild(root_1, stream_url.nextTree()); + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "prefix" + + + public static class rulebody_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "rulebody" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:107:1: rulebody : headatom ( '<-' | ':' ) body ( '.' )? -> ^( RULE headatom body ) ; + public final ConjunctiveQueryParser.rulebody_return rulebody() throws RecognitionException { + ConjunctiveQueryParser.rulebody_return retval = new ConjunctiveQueryParser.rulebody_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + Token string_literal13=null; + Token char_literal14=null; + Token char_literal16=null; + ParserRuleReturnScope headatom12 =null; + ParserRuleReturnScope body15 =null; + + Object string_literal13_tree=null; + Object char_literal14_tree=null; + Object char_literal16_tree=null; + RewriteRuleTokenStream stream_23=new RewriteRuleTokenStream(adaptor,"token 23"); + RewriteRuleTokenStream stream_24=new RewriteRuleTokenStream(adaptor,"token 24"); + RewriteRuleTokenStream stream_26=new RewriteRuleTokenStream(adaptor,"token 26"); + RewriteRuleSubtreeStream stream_headatom=new RewriteRuleSubtreeStream(adaptor,"rule headatom"); + RewriteRuleSubtreeStream stream_body=new RewriteRuleSubtreeStream(adaptor,"rule body"); + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:107:9: ( headatom ( '<-' | ':' ) body ( '.' )? -> ^( RULE headatom body ) ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:3: headatom ( '<-' | ':' ) body ( '.' )? 
+ { + pushFollow(FOLLOW_headatom_in_rulebody213); + headatom12=headatom(); + state._fsp--; + + stream_headatom.add(headatom12.getTree()); + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:12: ( '<-' | ':' ) + int alt2=2; + int LA2_0 = input.LA(1); + if ( (LA2_0==26) ) { + alt2=1; + } + else if ( (LA2_0==24) ) { + alt2=2; + } + + else { + NoViableAltException nvae = + new NoViableAltException("", 2, 0, input); + throw nvae; + } + + switch (alt2) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:13: '<-' + { + string_literal13=(Token)match(input,26,FOLLOW_26_in_rulebody216); + stream_26.add(string_literal13); + + } + break; + case 2 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:18: ':' + { + char_literal14=(Token)match(input,24,FOLLOW_24_in_rulebody218); + stream_24.add(char_literal14); + + } + break; + + } + + pushFollow(FOLLOW_body_in_rulebody221); + body15=body(); + state._fsp--; + + stream_body.add(body15.getTree()); + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:28: ( '.' )? + int alt3=2; + int LA3_0 = input.LA(1); + if ( (LA3_0==23) ) { + alt3=1; + } + switch (alt3) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:28: '.' + { + char_literal16=(Token)match(input,23,FOLLOW_23_in_rulebody223); + stream_23.add(char_literal16); + + } + break; + + } + + // AST REWRITE + // elements: headatom, body + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 108:34: -> ^( RULE headatom body ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:108:37: ^( RULE headatom body ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(RULE, "RULE"), root_1); + adaptor.addChild(root_1, stream_headatom.nextTree()); + adaptor.addChild(root_1, stream_body.nextTree()); + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "rulebody" + + + public static class body_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "body" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:110:1: body : atom ( ',' atom )* -> ^( ATOM_LIST ( atom )* ) ; + public final ConjunctiveQueryParser.body_return body() throws RecognitionException { + ConjunctiveQueryParser.body_return retval = new ConjunctiveQueryParser.body_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + Token char_literal18=null; + ParserRuleReturnScope atom17 =null; + ParserRuleReturnScope atom19 =null; + + Object char_literal18_tree=null; + RewriteRuleTokenStream 
stream_22=new RewriteRuleTokenStream(adaptor,"token 22"); + RewriteRuleSubtreeStream stream_atom=new RewriteRuleSubtreeStream(adaptor,"rule atom"); + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:110:5: ( atom ( ',' atom )* -> ^( ATOM_LIST ( atom )* ) ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:111:3: atom ( ',' atom )* + { + pushFollow(FOLLOW_atom_in_body245); + atom17=atom(); + state._fsp--; + + stream_atom.add(atom17.getTree()); + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:111:8: ( ',' atom )* + loop4: + while (true) { + int alt4=2; + int LA4_0 = input.LA(1); + if ( (LA4_0==22) ) { + alt4=1; + } + + switch (alt4) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:111:9: ',' atom + { + char_literal18=(Token)match(input,22,FOLLOW_22_in_body248); + stream_22.add(char_literal18); + + pushFollow(FOLLOW_atom_in_body250); + atom19=atom(); + state._fsp--; + + stream_atom.add(atom19.getTree()); + } + break; + + default : + break loop4; + } + } + + // AST REWRITE + // elements: atom + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 111:20: -> ^( ATOM_LIST ( atom )* ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:111:23: ^( ATOM_LIST ( atom )* ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(ATOM_LIST, "ATOM_LIST"), root_1); + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:111:35: ( atom )* + while ( stream_atom.hasNext() ) { + adaptor.addChild(root_1, stream_atom.nextTree()); + } + stream_atom.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "body" + + + public static class headatom_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "headatom" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:114:1: headatom : id '(' term ( ',' term )* ')' -> ^( HEADATOM ( term )* ) ; + public final ConjunctiveQueryParser.headatom_return headatom() throws RecognitionException { + ConjunctiveQueryParser.headatom_return retval = new ConjunctiveQueryParser.headatom_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + Token char_literal21=null; + Token char_literal23=null; + Token char_literal25=null; + ParserRuleReturnScope id20 =null; + ParserRuleReturnScope term22 =null; + ParserRuleReturnScope term24 =null; + + Object char_literal21_tree=null; + Object char_literal23_tree=null; + Object char_literal25_tree=null; + RewriteRuleTokenStream stream_21=new RewriteRuleTokenStream(adaptor,"token 21"); + 
RewriteRuleTokenStream stream_20=new RewriteRuleTokenStream(adaptor,"token 20"); + RewriteRuleTokenStream stream_22=new RewriteRuleTokenStream(adaptor,"token 22"); + RewriteRuleSubtreeStream stream_id=new RewriteRuleSubtreeStream(adaptor,"rule id"); + RewriteRuleSubtreeStream stream_term=new RewriteRuleSubtreeStream(adaptor,"rule term"); + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:114:9: ( id '(' term ( ',' term )* ')' -> ^( HEADATOM ( term )* ) ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:115:3: id '(' term ( ',' term )* ')' + { + pushFollow(FOLLOW_id_in_headatom276); + id20=id(); + state._fsp--; + + stream_id.add(id20.getTree()); + char_literal21=(Token)match(input,20,FOLLOW_20_in_headatom278); + stream_20.add(char_literal21); + + pushFollow(FOLLOW_term_in_headatom280); + term22=term(); + state._fsp--; + + stream_term.add(term22.getTree()); + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:115:15: ( ',' term )* + loop5: + while (true) { + int alt5=2; + int LA5_0 = input.LA(1); + if ( (LA5_0==22) ) { + alt5=1; + } + + switch (alt5) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:115:16: ',' term + { + char_literal23=(Token)match(input,22,FOLLOW_22_in_headatom283); + stream_22.add(char_literal23); + + pushFollow(FOLLOW_term_in_headatom285); + term24=term(); + state._fsp--; + + stream_term.add(term24.getTree()); + } + break; + + default : + break loop5; + } + } + + char_literal25=(Token)match(input,21,FOLLOW_21_in_headatom289); + stream_21.add(char_literal25); + + // AST REWRITE + // elements: term + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 115:31: -> ^( HEADATOM ( term )* ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:115:34: ^( HEADATOM ( term )* ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(HEADATOM, "HEADATOM"), root_1); + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:115:45: ( term )* + while ( stream_term.hasNext() ) { + adaptor.addChild(root_1, stream_term.nextTree()); + } + stream_term.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "headatom" + + + public static class atom_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "atom" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:117:1: atom : compositeid '(' term ( ',' term )* ')' -> ^( ATOM compositeid ( term )* ) ; + public final ConjunctiveQueryParser.atom_return atom() throws RecognitionException { + 
ConjunctiveQueryParser.atom_return retval = new ConjunctiveQueryParser.atom_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + Token char_literal27=null; + Token char_literal29=null; + Token char_literal31=null; + ParserRuleReturnScope compositeid26 =null; + ParserRuleReturnScope term28 =null; + ParserRuleReturnScope term30 =null; + + Object char_literal27_tree=null; + Object char_literal29_tree=null; + Object char_literal31_tree=null; + RewriteRuleTokenStream stream_21=new RewriteRuleTokenStream(adaptor,"token 21"); + RewriteRuleTokenStream stream_20=new RewriteRuleTokenStream(adaptor,"token 20"); + RewriteRuleTokenStream stream_22=new RewriteRuleTokenStream(adaptor,"token 22"); + RewriteRuleSubtreeStream stream_term=new RewriteRuleSubtreeStream(adaptor,"rule term"); + RewriteRuleSubtreeStream stream_compositeid=new RewriteRuleSubtreeStream(adaptor,"rule compositeid"); + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:117:5: ( compositeid '(' term ( ',' term )* ')' -> ^( ATOM compositeid ( term )* ) ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:118:3: compositeid '(' term ( ',' term )* ')' + { + pushFollow(FOLLOW_compositeid_in_atom309); + compositeid26=compositeid(); + state._fsp--; + + stream_compositeid.add(compositeid26.getTree()); + char_literal27=(Token)match(input,20,FOLLOW_20_in_atom311); + stream_20.add(char_literal27); + + pushFollow(FOLLOW_term_in_atom313); + term28=term(); + state._fsp--; + + stream_term.add(term28.getTree()); + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:118:24: ( ',' term )* + loop6: + while (true) { + int alt6=2; + int LA6_0 = input.LA(1); + if ( (LA6_0==22) ) { + alt6=1; + } + + switch (alt6) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:118:25: ',' term + { + char_literal29=(Token)match(input,22,FOLLOW_22_in_atom316); + stream_22.add(char_literal29); + + pushFollow(FOLLOW_term_in_atom318); + term30=term(); + state._fsp--; + + stream_term.add(term30.getTree()); + } + break; + + default : + break loop6; + } + } + + char_literal31=(Token)match(input,21,FOLLOW_21_in_atom322); + stream_21.add(char_literal31); + + // AST REWRITE + // elements: term, compositeid + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 118:40: -> ^( ATOM compositeid ( term )* ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:118:43: ^( ATOM compositeid ( term )* ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(ATOM, "ATOM"), root_1); + adaptor.addChild(root_1, stream_compositeid.nextTree()); + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:118:62: ( term )* + while ( stream_term.hasNext() ) { + adaptor.addChild(root_1, stream_term.nextTree()); + } + stream_term.reset(); + + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + 
reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "atom" + + + public static class compositeid_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "compositeid" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:120:1: compositeid : ( id ) ':' ( id ) -> ^( ID id id ) ; + public final ConjunctiveQueryParser.compositeid_return compositeid() throws RecognitionException { + ConjunctiveQueryParser.compositeid_return retval = new ConjunctiveQueryParser.compositeid_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + Token char_literal33=null; + ParserRuleReturnScope id32 =null; + ParserRuleReturnScope id34 =null; + + Object char_literal33_tree=null; + RewriteRuleTokenStream stream_24=new RewriteRuleTokenStream(adaptor,"token 24"); + RewriteRuleSubtreeStream stream_id=new RewriteRuleSubtreeStream(adaptor,"rule id"); + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:120:12: ( ( id ) ':' ( id ) -> ^( ID id id ) ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:2: ( id ) ':' ( id ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:2: ( id ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:3: id + { + pushFollow(FOLLOW_id_in_compositeid342); + id32=id(); + state._fsp--; + + stream_id.add(id32.getTree()); + } + + char_literal33=(Token)match(input,24,FOLLOW_24_in_compositeid345); + stream_24.add(char_literal33); + + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:11: ( id ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:12: id + { + pushFollow(FOLLOW_id_in_compositeid348); + id34=id(); + state._fsp--; + + stream_id.add(id34.getTree()); + } + + // AST REWRITE + // elements: id, id + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 121:16: -> ^( ID id id ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:121:19: ^( ID id id ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(ID, "ID"), root_1); + adaptor.addChild(root_1, stream_id.nextTree()); + adaptor.addChild(root_1, stream_id.nextTree()); + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "compositeid" + + + public static class term_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR 
start "term" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:124:1: term : ( variable -> ^( VARIABLE variable ) | simpleid -> ^( SCONSTANT simpleid ) | compositeid -> ^( CONSTANT compositeid ) ); + public final ConjunctiveQueryParser.term_return term() throws RecognitionException { + ConjunctiveQueryParser.term_return retval = new ConjunctiveQueryParser.term_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + ParserRuleReturnScope variable35 =null; + ParserRuleReturnScope simpleid36 =null; + ParserRuleReturnScope compositeid37 =null; + + RewriteRuleSubtreeStream stream_simpleid=new RewriteRuleSubtreeStream(adaptor,"rule simpleid"); + RewriteRuleSubtreeStream stream_compositeid=new RewriteRuleSubtreeStream(adaptor,"rule compositeid"); + RewriteRuleSubtreeStream stream_variable=new RewriteRuleSubtreeStream(adaptor,"rule variable"); + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:124:5: ( variable -> ^( VARIABLE variable ) | simpleid -> ^( SCONSTANT simpleid ) | compositeid -> ^( CONSTANT compositeid ) ) + int alt7=3; + switch ( input.LA(1) ) { + case 28: + { + alt7=1; + } + break; + case 25: + { + alt7=2; + } + break; + case STRING: + { + alt7=3; + } + break; + default: + NoViableAltException nvae = + new NoViableAltException("", 7, 0, input); + throw nvae; + } + switch (alt7) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:125:3: variable + { + pushFollow(FOLLOW_variable_in_term371); + variable35=variable(); + state._fsp--; + + stream_variable.add(variable35.getTree()); + // AST REWRITE + // elements: variable + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 125:12: -> ^( VARIABLE variable ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:125:15: ^( VARIABLE variable ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(VARIABLE, "VARIABLE"), root_1); + adaptor.addChild(root_1, stream_variable.nextTree()); + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + break; + case 2 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:126:5: simpleid + { + pushFollow(FOLLOW_simpleid_in_term386); + simpleid36=simpleid(); + state._fsp--; + + stream_simpleid.add(simpleid36.getTree()); + // AST REWRITE + // elements: simpleid + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 126:14: -> ^( SCONSTANT simpleid ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:126:17: ^( SCONSTANT simpleid ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(SCONSTANT, "SCONSTANT"), root_1); + adaptor.addChild(root_1, stream_simpleid.nextTree()); + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + break; + case 3 : + // 
/home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:127:5: compositeid + { + pushFollow(FOLLOW_compositeid_in_term400); + compositeid37=compositeid(); + state._fsp--; + + stream_compositeid.add(compositeid37.getTree()); + // AST REWRITE + // elements: compositeid + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 127:17: -> ^( CONSTANT compositeid ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:127:20: ^( CONSTANT compositeid ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(CONSTANT, "CONSTANT"), root_1); + adaptor.addChild(root_1, stream_compositeid.nextTree()); + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + break; + + } + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "term" + + + public static class id_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "id" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:129:1: id : ( STRING ) ; + public final ConjunctiveQueryParser.id_return id() throws RecognitionException { + ConjunctiveQueryParser.id_return retval = new ConjunctiveQueryParser.id_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + Token STRING38=null; + + Object STRING38_tree=null; + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:129:4: ( ( STRING ) ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:129:6: ( STRING ) + { + root_0 = (Object)adaptor.nil(); + + + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:129:6: ( STRING ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:129:7: STRING + { + STRING38=(Token)match(input,STRING,FOLLOW_STRING_in_id417); + STRING38_tree = (Object)adaptor.create(STRING38); + adaptor.addChild(root_0, STRING38_tree); + + } + + } + + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "id" + + + public static class simpleid_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "simpleid" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:130:1: simpleid : ( '<' URLSTRING '>' | '<' STRING '>' ); + public final ConjunctiveQueryParser.simpleid_return simpleid() 
throws RecognitionException { + ConjunctiveQueryParser.simpleid_return retval = new ConjunctiveQueryParser.simpleid_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + Token char_literal39=null; + Token URLSTRING40=null; + Token char_literal41=null; + Token char_literal42=null; + Token STRING43=null; + Token char_literal44=null; + + Object char_literal39_tree=null; + Object URLSTRING40_tree=null; + Object char_literal41_tree=null; + Object char_literal42_tree=null; + Object STRING43_tree=null; + Object char_literal44_tree=null; + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:130:10: ( '<' URLSTRING '>' | '<' STRING '>' ) + int alt8=2; + int LA8_0 = input.LA(1); + if ( (LA8_0==25) ) { + int LA8_1 = input.LA(2); + if ( (LA8_1==URLSTRING) ) { + alt8=1; + } + else if ( (LA8_1==STRING) ) { + alt8=2; + } + + else { + int nvaeMark = input.mark(); + try { + input.consume(); + NoViableAltException nvae = + new NoViableAltException("", 8, 1, input); + throw nvae; + } finally { + input.rewind(nvaeMark); + } + } + + } + + else { + NoViableAltException nvae = + new NoViableAltException("", 8, 0, input); + throw nvae; + } + + switch (alt8) { + case 1 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:130:12: '<' URLSTRING '>' + { + root_0 = (Object)adaptor.nil(); + + + char_literal39=(Token)match(input,25,FOLLOW_25_in_simpleid425); + char_literal39_tree = (Object)adaptor.create(char_literal39); + adaptor.addChild(root_0, char_literal39_tree); + + URLSTRING40=(Token)match(input,URLSTRING,FOLLOW_URLSTRING_in_simpleid427); + URLSTRING40_tree = (Object)adaptor.create(URLSTRING40); + adaptor.addChild(root_0, URLSTRING40_tree); + + char_literal41=(Token)match(input,27,FOLLOW_27_in_simpleid429); + char_literal41_tree = (Object)adaptor.create(char_literal41); + adaptor.addChild(root_0, char_literal41_tree); + + } + break; + case 2 : + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:130:32: '<' STRING '>' + { + root_0 = (Object)adaptor.nil(); + + + char_literal42=(Token)match(input,25,FOLLOW_25_in_simpleid433); + char_literal42_tree = (Object)adaptor.create(char_literal42); + adaptor.addChild(root_0, char_literal42_tree); + + STRING43=(Token)match(input,STRING,FOLLOW_STRING_in_simpleid435); + STRING43_tree = (Object)adaptor.create(STRING43); + adaptor.addChild(root_0, STRING43_tree); + + char_literal44=(Token)match(input,27,FOLLOW_27_in_simpleid437); + char_literal44_tree = (Object)adaptor.create(char_literal44); + adaptor.addChild(root_0, char_literal44_tree); + + } + break; + + } + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "simpleid" + + + public static class variable_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "variable" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:133:1: variable : ( '?' 
) id -> ^( id ) ; + public final ConjunctiveQueryParser.variable_return variable() throws RecognitionException { + ConjunctiveQueryParser.variable_return retval = new ConjunctiveQueryParser.variable_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + Token char_literal45=null; + ParserRuleReturnScope id46 =null; + + Object char_literal45_tree=null; + RewriteRuleTokenStream stream_28=new RewriteRuleTokenStream(adaptor,"token 28"); + RewriteRuleSubtreeStream stream_id=new RewriteRuleSubtreeStream(adaptor,"rule id"); + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:133:9: ( ( '?' ) id -> ^( id ) ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:134:3: ( '?' ) id + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:134:3: ( '?' ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:134:4: '?' + { + char_literal45=(Token)match(input,28,FOLLOW_28_in_variable448); + stream_28.add(char_literal45); + + } + + pushFollow(FOLLOW_id_in_variable451); + id46=id(); + state._fsp--; + + stream_id.add(id46.getTree()); + // AST REWRITE + // elements: id + // token labels: + // rule labels: retval + // token list labels: + // rule list labels: + // wildcard labels: + retval.tree = root_0; + RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null); + + root_0 = (Object)adaptor.nil(); + // 134:12: -> ^( id ) + { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:134:15: ^( id ) + { + Object root_1 = (Object)adaptor.nil(); + root_1 = (Object)adaptor.becomeRoot(stream_id.nextNode(), root_1); + adaptor.addChild(root_0, root_1); + } + + } + + + retval.tree = root_0; + + } + + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "variable" + + + public static class url_return extends ParserRuleReturnScope { + Object tree; + @Override + public Object getTree() { return tree; } + }; + + + // $ANTLR start "url" + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:136:2: url : ( URLSTRING ) ; + public final ConjunctiveQueryParser.url_return url() throws RecognitionException { + ConjunctiveQueryParser.url_return retval = new ConjunctiveQueryParser.url_return(); + retval.start = input.LT(1); + + Object root_0 = null; + + Token URLSTRING47=null; + + Object URLSTRING47_tree=null; + + try { + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:136:6: ( ( URLSTRING ) ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:136:8: ( URLSTRING ) + { + root_0 = (Object)adaptor.nil(); + + + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:136:8: ( URLSTRING ) + // /home/yzhou/workspace/KARMA/src/org/semanticweb/karma2/model/cqparser/ConjunctiveQuery.g:136:9: URLSTRING + { + URLSTRING47=(Token)match(input,URLSTRING,FOLLOW_URLSTRING_in_url469); + URLSTRING47_tree = (Object)adaptor.create(URLSTRING47); + 
adaptor.addChild(root_0, URLSTRING47_tree); + + } + + } + + retval.stop = input.LT(-1); + + retval.tree = (Object)adaptor.rulePostProcessing(root_0); + adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); + + } + catch (RecognitionException re) { + reportError(re); + recover(input,re); + retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); + } + finally { + // do for sure before leaving + } + return retval; + } + // $ANTLR end "url" + + // Delegated rules + + + + public static final BitSet FOLLOW_prefixlist_in_cq132 = new BitSet(new long[]{0x0000000000008000L}); + public static final BitSet FOLLOW_rulebody_in_cq134 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_prefix_in_prefixlist154 = new BitSet(new long[]{0x0000000000400002L}); + public static final BitSet FOLLOW_22_in_prefixlist157 = new BitSet(new long[]{0x0000000020000000L}); + public static final BitSet FOLLOW_prefix_in_prefixlist159 = new BitSet(new long[]{0x0000000000400002L}); + public static final BitSet FOLLOW_29_in_prefix181 = new BitSet(new long[]{0x0000000000008000L}); + public static final BitSet FOLLOW_id_in_prefix183 = new BitSet(new long[]{0x0000000001000000L}); + public static final BitSet FOLLOW_24_in_prefix185 = new BitSet(new long[]{0x0000000002000000L}); + public static final BitSet FOLLOW_25_in_prefix187 = new BitSet(new long[]{0x0000000000020000L}); + public static final BitSet FOLLOW_url_in_prefix189 = new BitSet(new long[]{0x0000000008000000L}); + public static final BitSet FOLLOW_27_in_prefix191 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_headatom_in_rulebody213 = new BitSet(new long[]{0x0000000005000000L}); + public static final BitSet FOLLOW_26_in_rulebody216 = new BitSet(new long[]{0x0000000000008000L}); + public static final BitSet FOLLOW_24_in_rulebody218 = new BitSet(new long[]{0x0000000000008000L}); + public static final BitSet FOLLOW_body_in_rulebody221 = new BitSet(new long[]{0x0000000000800002L}); + public static final BitSet FOLLOW_23_in_rulebody223 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_atom_in_body245 = new BitSet(new long[]{0x0000000000400002L}); + public static final BitSet FOLLOW_22_in_body248 = new BitSet(new long[]{0x0000000000008000L}); + public static final BitSet FOLLOW_atom_in_body250 = new BitSet(new long[]{0x0000000000400002L}); + public static final BitSet FOLLOW_id_in_headatom276 = new BitSet(new long[]{0x0000000000100000L}); + public static final BitSet FOLLOW_20_in_headatom278 = new BitSet(new long[]{0x0000000012008000L}); + public static final BitSet FOLLOW_term_in_headatom280 = new BitSet(new long[]{0x0000000000600000L}); + public static final BitSet FOLLOW_22_in_headatom283 = new BitSet(new long[]{0x0000000012008000L}); + public static final BitSet FOLLOW_term_in_headatom285 = new BitSet(new long[]{0x0000000000600000L}); + public static final BitSet FOLLOW_21_in_headatom289 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_compositeid_in_atom309 = new BitSet(new long[]{0x0000000000100000L}); + public static final BitSet FOLLOW_20_in_atom311 = new BitSet(new long[]{0x0000000012008000L}); + public static final BitSet FOLLOW_term_in_atom313 = new BitSet(new long[]{0x0000000000600000L}); + public static final BitSet FOLLOW_22_in_atom316 = new BitSet(new long[]{0x0000000012008000L}); + public static final BitSet FOLLOW_term_in_atom318 = new BitSet(new long[]{0x0000000000600000L}); + public 
static final BitSet FOLLOW_21_in_atom322 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_id_in_compositeid342 = new BitSet(new long[]{0x0000000001000000L}); + public static final BitSet FOLLOW_24_in_compositeid345 = new BitSet(new long[]{0x0000000000008000L}); + public static final BitSet FOLLOW_id_in_compositeid348 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_variable_in_term371 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_simpleid_in_term386 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_compositeid_in_term400 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_STRING_in_id417 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_25_in_simpleid425 = new BitSet(new long[]{0x0000000000020000L}); + public static final BitSet FOLLOW_URLSTRING_in_simpleid427 = new BitSet(new long[]{0x0000000008000000L}); + public static final BitSet FOLLOW_27_in_simpleid429 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_25_in_simpleid433 = new BitSet(new long[]{0x0000000000008000L}); + public static final BitSet FOLLOW_STRING_in_simpleid435 = new BitSet(new long[]{0x0000000008000000L}); + public static final BitSet FOLLOW_27_in_simpleid437 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_28_in_variable448 = new BitSet(new long[]{0x0000000000008000L}); + public static final BitSet FOLLOW_id_in_variable451 = new BitSet(new long[]{0x0000000000000002L}); + public static final BitSet FOLLOW_URLSTRING_in_url469 = new BitSet(new long[]{0x0000000000000002L}); +} diff --git a/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryWalker.java b/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryWalker.java new file mode 100644 index 0000000..f10b761 --- /dev/null +++ b/src/main/java/org/semanticweb/karma2/model/cqparser/ConjunctiveQueryWalker.java @@ -0,0 +1,179 @@ +package org.semanticweb.karma2.model.cqparser; + + + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import org.antlr.runtime.tree.CommonTree; +import org.semanticweb.HermiT.model.Atom; +import org.semanticweb.HermiT.model.AtomicConcept; +import org.semanticweb.HermiT.model.AtomicRole; +import org.semanticweb.HermiT.model.Individual; +import org.semanticweb.HermiT.model.Term; +import org.semanticweb.HermiT.model.Variable; +import org.semanticweb.karma2.exception.IllegalInputQueryException; +import org.semanticweb.karma2.model.ConjunctiveQuery; + +import uk.ac.ox.cs.JRDFox.Prefixes; +import uk.ac.ox.cs.pagoda.util.Utility; + + + + +public class ConjunctiveQueryWalker { + + + public ConjunctiveQueryWalker() { + + } + + @SuppressWarnings("unchecked") + private List childrenOf(CommonTree node) { + return (List) node.getChildren(); + } + + + private boolean isSafe(Term[] headTerms, Atom[] atoms) { + for (Term t : headTerms) { + if (t instanceof Variable) { + boolean res = false; + for (Atom a : atoms) { + if (a.getArity()==1) { + if (a.getArgument(0).equals(t)) + res = true; + } + if (a.getArity()==2) { + if (a.getArgument(0).equals(t) || a.getArgument(1).equals(t)) + res = true; + } + } + if(!res) + return false; + } + } + return true; + } + + + public ConjunctiveQuery walkExpressionNode(CommonTree ruleNode) throws IllegalInputQueryException { + + assert (ruleNode.getType() == ConjunctiveQueryLexer.EXPRESSION); + + Iterator 
iterator = childrenOf(ruleNode).iterator(); + + CommonTree prefixList = iterator.next(); + assert (prefixList.getType() == ConjunctiveQueryLexer.PREFIX_LIST); + Prefixes prefixes = walkPrefixList(prefixList); + CommonTree rulebody = iterator.next(); + assert (rulebody.getType() == ConjunctiveQueryLexer.RULE); + return walkRuleNode(rulebody, prefixes); + + } + + public Prefixes walkPrefixList(CommonTree prefixlist) throws IllegalInputQueryException { + assert (prefixlist.getType() == ConjunctiveQueryLexer.PREFIX_LIST); + Prefixes pref = new Prefixes(); + for (CommonTree prefixNode : childrenOf(prefixlist)) { + walkPrefixNode(prefixNode, pref); + } + return pref; + + } + + private void walkPrefixNode(CommonTree prefixNode, Prefixes pref) throws IllegalInputQueryException { + Iterator iterator = childrenOf(prefixNode).iterator(); + CommonTree shortID = iterator.next(); + CommonTree longID = iterator.next(); + pref.declarePrefix(shortID.getText() + ":", longID.getText()); + } + + + public ConjunctiveQuery walkRuleNode(CommonTree ruleNode, Prefixes prefixes) throws IllegalInputQueryException { + + assert (ruleNode.getType() == ConjunctiveQueryLexer.RULE); + + Iterator iterator = childrenOf(ruleNode).iterator(); + + CommonTree headNode = iterator.next(); + assert (headNode.getType() == ConjunctiveQueryLexer.HEADATOM); + Term[] headTerms = walkHeadAtomNode(headNode); + Atom[] atoms = walkAtomList(iterator.next()); + if (!isSafe(headTerms, atoms)) + throw new IllegalInputQueryException("query is not safe"); + return new ConjunctiveQuery(atoms, headTerms, prefixes); + + } + + private Term[] walkHeadAtomNode(CommonTree node) throws IllegalInputQueryException { + List terms = new ArrayList(); + for (CommonTree termNode : childrenOf(node)) { + terms.add(walkTermNode(termNode)); + } + return terms.toArray(new Term[terms.size()]); + } + + + private String walkCompositeId(CommonTree compositeID) { + Iterator iterator = childrenOf(compositeID).iterator(); + return iterator.next().getText() + ":" + iterator.next().getText() ; + } + + private String walkSimpleId(CommonTree termNode) { + Iterator it = childrenOf(termNode).iterator(); + it.next(); + CommonTree t = it.next(); + return t.getText(); + } + + private Term walkTermNode(CommonTree termNode) throws IllegalInputQueryException { + if (termNode.getType() == ConjunctiveQueryLexer.VARIABLE) { + return Variable.create("?" 
+ childrenOf(termNode).iterator().next().getText()); + } + if (termNode.getType() == ConjunctiveQueryLexer.CONSTANT) { + Individual newind = Individual.create(walkCompositeId(childrenOf(termNode).iterator().next())); + Utility.logError(newind); + return newind; + } + if (termNode.getType() == ConjunctiveQueryLexer.SCONSTANT) { + Individual newind = Individual.create(walkSimpleId(termNode)); + return newind; + } + throw new IllegalArgumentException(); + } + + + + public Atom[] walkAtomList(CommonTree node) throws IllegalInputQueryException { + assert (node.getType() == ConjunctiveQueryLexer.ATOM_LIST); + List atoms = new ArrayList(); + for (CommonTree atomNode : childrenOf(node)) { + atoms.add(walkAtomNode(atomNode)); + } + return atoms.toArray(new Atom[atoms.size()]); + + } + + private Atom walkAtomNode(CommonTree atomNode) throws IllegalInputQueryException { + assert (atomNode.getType() == ConjunctiveQueryLexer.ATOM); + Iterator iterator = childrenOf(atomNode).iterator(); + CommonTree id = iterator.next(); + String predicatename = walkCompositeId(id); + List listofterms = new ArrayList(); + while (iterator.hasNext()){ + listofterms.add(walkTermNode(iterator.next())); + } + if(listofterms.isEmpty() || (listofterms.size()>2)) + throw new IllegalInputQueryException("Problem parsing terms in the query"); + Term[] terms = listofterms.toArray(new Term[listofterms.size()]); + if (terms.length == 1) + return Atom.create(AtomicConcept.create(predicatename), terms); + if (terms.length == 2) + return Atom.create(AtomicRole.create(predicatename), terms); + throw new IllegalInputQueryException("Problem parsing terms in the query"); + } + + +} + diff --git a/src/main/java/org/semanticweb/karma2/profile/ELHOProfile.java b/src/main/java/org/semanticweb/karma2/profile/ELHOProfile.java new file mode 100644 index 0000000..23761f0 --- /dev/null +++ b/src/main/java/org/semanticweb/karma2/profile/ELHOProfile.java @@ -0,0 +1,261 @@ +package org.semanticweb.karma2.profile; + +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; + +import org.semanticweb.owlapi.model.AxiomType; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClassExpression; +import org.semanticweb.owlapi.model.OWLDataHasValue; +import org.semanticweb.owlapi.model.OWLDataIntersectionOf; +import org.semanticweb.owlapi.model.OWLDataOneOf; +import org.semanticweb.owlapi.model.OWLDataProperty; +import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom; +import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom; +import org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom; +import org.semanticweb.owlapi.model.OWLHasKeyAxiom; +import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLObjectHasSelf; +import org.semanticweb.owlapi.model.OWLObjectMinCardinality; +import org.semanticweb.owlapi.model.OWLObjectOneOf; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom; +import 
org.semanticweb.owlapi.model.OWLSubDataPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; +import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; +import org.semanticweb.owlapi.profiles.OWL2ELProfile; +import org.semanticweb.owlapi.profiles.OWLProfile; +import org.semanticweb.owlapi.profiles.OWLProfileReport; +import org.semanticweb.owlapi.profiles.OWLProfileViolation; +import org.semanticweb.owlapi.profiles.violations.UseOfDataOneOfWithMultipleLiterals; +import org.semanticweb.owlapi.profiles.violations.UseOfIllegalAxiom; +import org.semanticweb.owlapi.profiles.violations.UseOfIllegalClassExpression; +import org.semanticweb.owlapi.profiles.violations.UseOfObjectOneOfWithMultipleIndividuals; +import org.semanticweb.owlapi.util.OWLObjectPropertyManager; +import org.semanticweb.owlapi.util.OWLOntologyWalker; +import org.semanticweb.owlapi.util.OWLOntologyWalkerVisitor; +import uk.ac.ox.cs.pagoda.util.Utility; + +public class ELHOProfile implements OWLProfile { + + public OWLOntology getFragment(OWLOntology ontology) { + OWLOntologyManager manager = ontology.getOWLOntologyManager(); + OWLOntology elhoOntology = null; + try { + Utility.logDebug("OntologyID: " + ontology.getOntologyID()); + try { + String ontologyIRI = ontology.getOntologyID().getOntologyIRI().toString(); + if (ontologyIRI.contains(".owl")) + ontologyIRI = ontologyIRI.replace(".owl", "-elho.owl"); + else + ontologyIRI = ontologyIRI + "elho"; + elhoOntology = manager.createOntology(IRI.create(ontologyIRI)); + } catch (NullPointerException e) { +// e.printStackTrace(); + elhoOntology = manager.createOntology(); + } + + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } + for (OWLOntology onto: ontology.getImportsClosure()) + manager.addAxioms(elhoOntology, onto.getAxioms()); + + // TODO to be checked ... + manager.removeAxioms(elhoOntology, elhoOntology.getAxioms(AxiomType.DIFFERENT_INDIVIDUALS)); + + OWLProfileReport report = checkOntology(elhoOntology); + + for (OWLProfileViolation violation: report.getViolations()) { + OWLAxiom axiom = violation.getAxiom(); + manager.removeAxiom(elhoOntology, axiom); + } + Utility.logDebug("ELHO fragment extracted ... 
"); + + return elhoOntology; + } + + @Override + public OWLProfileReport checkOntology(OWLOntology ontology) { + OWL2ELProfile profile = new OWL2ELProfile(); + OWLProfileReport report = profile.checkOntology(ontology); + Set violations = new HashSet(); + violations.addAll(report.getViolations()); + MyOWLOntologyWalker ontologyWalker = new MyOWLOntologyWalker(ontology.getImportsClosure()); + ELHOProfileObjectVisitor visitor = new ELHOProfileObjectVisitor(ontologyWalker, ontology.getOWLOntologyManager()); + ontologyWalker.walkStructure(visitor); + + for (Iterator iter = violations.iterator(); iter.hasNext(); ) { + OWLProfileViolation vio = iter.next(); + if (vio instanceof UseOfIllegalClassExpression) { + OWLClassExpression exp = ((UseOfIllegalClassExpression) vio).getExpression(); + if (exp instanceof OWLObjectMinCardinality && ((OWLObjectMinCardinality) exp).getCardinality() == 1) + iter.remove(); + } + } + + violations.addAll(visitor.getProfileViolations()); + return new OWLProfileReport(this, violations); + } + + @Override + public String getName() { + return "ELHO"; + } + + protected class ELHOProfileObjectVisitor extends OWLOntologyWalkerVisitor { + + private final OWLOntologyManager man; + + private OWLObjectPropertyManager propertyManager; + + private final Set profileViolations = new HashSet(); + + public ELHOProfileObjectVisitor(OWLOntologyWalker walker, OWLOntologyManager man) { + super(walker); + this.man = man; + } + + public Set getProfileViolations() { + return new HashSet(profileViolations); + } + + @SuppressWarnings("unused") + private OWLObjectPropertyManager getPropertyManager() { + if (propertyManager == null) { + propertyManager = new OWLObjectPropertyManager(getCurrentOntology()); + } + return propertyManager; + } + + + + @Override + public void visit(OWLDataProperty p) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + + @Override + public void visit(OWLObjectOneOf desc) { + if (desc.getIndividuals().size() != 1) { + profileViolations.add(new UseOfObjectOneOfWithMultipleIndividuals(getCurrentOntology(), getCurrentAxiom(), desc)); + } + } + + @Override + public void visit(OWLDataHasValue desc) { + profileViolations.add(new UseOfIllegalClassExpression(getCurrentOntology(), getCurrentAxiom(), desc)); + } + + @Override + public void visit(OWLDataSomeValuesFrom desc) { + profileViolations.add(new UseOfIllegalClassExpression(getCurrentOntology(), getCurrentAxiom(), desc)); + } + + @Override + public void visit(OWLDataIntersectionOf desc) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + @Override + public void visit(OWLSubDataPropertyOfAxiom desc) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + @Override + public void visit(OWLEquivalentDataPropertiesAxiom desc) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + @Override + public void visit(OWLTransitiveObjectPropertyAxiom desc) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + @Override + public void visit(OWLReflexiveObjectPropertyAxiom desc) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + @Override + public void visit(OWLDataPropertyDomainAxiom desc) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + @Override + public void visit(OWLDataPropertyRangeAxiom desc) { + 
profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + + + @Override + public void visit(OWLDataPropertyAssertionAxiom desc) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + @Override + public void visit(OWLNegativeDataPropertyAssertionAxiom desc) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + @Override + public void visit(OWLNegativeObjectPropertyAssertionAxiom desc) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + @Override + public void visit(OWLFunctionalDataPropertyAxiom desc) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + @Override + public void visit(OWLHasKeyAxiom desc) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + } + + + + + @Override + public void visit(OWLObjectHasSelf node) { + profileViolations.add(new UseOfIllegalClassExpression(getCurrentOntology(), getCurrentAxiom(), node)); + } + + + @Override + public void visit(OWLDataOneOf node) { + profileViolations.add(new UseOfDataOneOfWithMultipleLiterals(getCurrentOntology(), getCurrentAxiom(), node)); + } + + + + @Override + public void visit(OWLSubPropertyChainOfAxiom axiom) { + profileViolations.add(new UseOfIllegalAxiom(getCurrentOntology(), getCurrentAxiom())); + + } + + @Override + public void visit(OWLOntology ontology) { + propertyManager = null; + } + } + + @Override + public IRI getIRI() { + return null; + } + + +} + diff --git a/src/main/java/org/semanticweb/karma2/profile/MyOWLOntologyWalker.java b/src/main/java/org/semanticweb/karma2/profile/MyOWLOntologyWalker.java new file mode 100644 index 0000000..1ce8076 --- /dev/null +++ b/src/main/java/org/semanticweb/karma2/profile/MyOWLOntologyWalker.java @@ -0,0 +1,914 @@ +package org.semanticweb.karma2.profile; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnnotation; +import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; +import org.semanticweb.owlapi.model.OWLAnnotationProperty; +import org.semanticweb.owlapi.model.OWLAnnotationPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLAnnotationPropertyRangeAxiom; +import org.semanticweb.owlapi.model.OWLAnonymousIndividual; +import org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; +import org.semanticweb.owlapi.model.OWLClassExpression; +import org.semanticweb.owlapi.model.OWLDataAllValuesFrom; +import org.semanticweb.owlapi.model.OWLDataComplementOf; +import org.semanticweb.owlapi.model.OWLDataExactCardinality; +import org.semanticweb.owlapi.model.OWLDataHasValue; +import org.semanticweb.owlapi.model.OWLDataIntersectionOf; +import org.semanticweb.owlapi.model.OWLDataMaxCardinality; +import org.semanticweb.owlapi.model.OWLDataMinCardinality; +import org.semanticweb.owlapi.model.OWLDataOneOf; +import org.semanticweb.owlapi.model.OWLDataProperty; +import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyExpression; +import org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom; +import 
org.semanticweb.owlapi.model.OWLDataRange; +import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom; +import org.semanticweb.owlapi.model.OWLDataUnionOf; +import org.semanticweb.owlapi.model.OWLDatatype; +import org.semanticweb.owlapi.model.OWLDatatypeDefinitionAxiom; +import org.semanticweb.owlapi.model.OWLDatatypeRestriction; +import org.semanticweb.owlapi.model.OWLDeclarationAxiom; +import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom; +import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointUnionAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLFacetRestriction; +import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom; +import org.semanticweb.owlapi.model.OWLFunctionalObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLHasKeyAxiom; +import org.semanticweb.owlapi.model.OWLIndividual; +import org.semanticweb.owlapi.model.OWLInverseFunctionalObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLInverseObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLLiteral; +import org.semanticweb.owlapi.model.OWLNamedIndividual; +import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLObject; +import org.semanticweb.owlapi.model.OWLObjectAllValuesFrom; +import org.semanticweb.owlapi.model.OWLObjectComplementOf; +import org.semanticweb.owlapi.model.OWLObjectExactCardinality; +import org.semanticweb.owlapi.model.OWLObjectHasSelf; +import org.semanticweb.owlapi.model.OWLObjectHasValue; +import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; +import org.semanticweb.owlapi.model.OWLObjectInverseOf; +import org.semanticweb.owlapi.model.OWLObjectMaxCardinality; +import org.semanticweb.owlapi.model.OWLObjectMinCardinality; +import org.semanticweb.owlapi.model.OWLObjectOneOf; +import org.semanticweb.owlapi.model.OWLObjectProperty; +import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLObjectPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; +import org.semanticweb.owlapi.model.OWLObjectPropertyRangeAxiom; +import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; +import org.semanticweb.owlapi.model.OWLObjectUnionOf; +import org.semanticweb.owlapi.model.OWLObjectVisitor; +import org.semanticweb.owlapi.model.OWLObjectVisitorEx; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLSameIndividualAxiom; +import org.semanticweb.owlapi.model.OWLSubAnnotationPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.owlapi.model.OWLSubDataPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; +import org.semanticweb.owlapi.model.OWLSymmetricObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; 
+import org.semanticweb.owlapi.model.SWRLAtom; +import org.semanticweb.owlapi.model.SWRLBuiltInAtom; +import org.semanticweb.owlapi.model.SWRLClassAtom; +import org.semanticweb.owlapi.model.SWRLDArgument; +import org.semanticweb.owlapi.model.SWRLDataPropertyAtom; +import org.semanticweb.owlapi.model.SWRLDataRangeAtom; +import org.semanticweb.owlapi.model.SWRLDifferentIndividualsAtom; +import org.semanticweb.owlapi.model.SWRLIndividualArgument; +import org.semanticweb.owlapi.model.SWRLLiteralArgument; +import org.semanticweb.owlapi.model.SWRLObjectPropertyAtom; +import org.semanticweb.owlapi.model.SWRLRule; +import org.semanticweb.owlapi.model.SWRLSameIndividualAtom; +import org.semanticweb.owlapi.model.SWRLVariable; +import org.semanticweb.owlapi.util.OWLOntologyWalker; + + +public class MyOWLOntologyWalker extends OWLOntologyWalker { + + private final Collection ontologies; + + /** + * @param objects the set of objects to visit + */ + public MyOWLOntologyWalker(Set objects) { + this(objects, true); + } + /** + * @param visitDuplicates true if duplicates should be visited + * @param objects the set of objects to visit + */ + public MyOWLOntologyWalker(Set objects, boolean visitDuplicates) { + super(objects); + this.ontologies = new ArrayList(objects); + } + + /** + * @param v visitor to use over the objects + */ + //public void walkStructure(OWLObjectVisitorEx v) { + public void walkStructure(OWLObjectVisitor v) { + this.visitor = v; + StructureWalker walker = new StructureWalker(); + for (OWLOntology o : ontologies) { + o.accept(walker); + } + } + + private class StructureWalker implements OWLObjectVisitor { + + private final Set visited = new HashSet(); + + public StructureWalker() {} + + private void process(OWLObject object) { + if (!visitDuplicates) { + if (!visited.contains(object)) { + visited.add(object); + object.accept(visitor); + } + } + else { + object.accept(visitor); + } + } + + @Override + public void visit(IRI iri) { + process(iri); + } + + @Override + public void visit(OWLOntology ontologyToVisit) { + MyOWLOntologyWalker.this.ontology = ontologyToVisit; + MyOWLOntologyWalker.this.ax = null; + process(ontologyToVisit); + for (OWLAnnotation anno : ontologyToVisit.getAnnotations()) { + anno.accept(this); + } + for (OWLAxiom a : ontologyToVisit.getAxioms()) { + a.accept(this); + } + } + + + @Override + public void visit(OWLAsymmetricObjectPropertyAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getProperty().accept(this); + } + + + @Override + public void visit(OWLClassAssertionAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getIndividual().accept(this); + axiom.getClassExpression().accept(this); + } + + + @Override + public void visit(OWLDataPropertyAssertionAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getSubject().accept(this); + axiom.getProperty().accept(this); + axiom.getObject().accept(this); + } + + + @Override + public void visit(OWLDataPropertyDomainAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getDomain().accept(this); + axiom.getProperty().accept(this); + } + + + @Override + public void visit(OWLDataPropertyRangeAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getRange().accept(this); + axiom.getProperty().accept(this); + } + + + @Override + public void visit(OWLSubDataPropertyOfAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getSubProperty().accept(this); + 
axiom.getSuperProperty().accept(this); + } + + + @Override + public void visit(OWLDeclarationAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getEntity().accept(this); + } + + + @Override + public void visit(OWLDifferentIndividualsAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + for (OWLIndividual ind : axiom.getIndividuals()) { + ind.accept(this); + } + } + + + @Override + public void visit(OWLDisjointClassesAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + for (OWLClassExpression desc : axiom.getClassExpressions()) { + desc.accept(this); + } + } + + + @Override + public void visit(OWLDisjointDataPropertiesAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + for (OWLDataPropertyExpression prop : axiom.getProperties()) { + prop.accept(this); + } + } + + + @Override + public void visit(OWLDisjointObjectPropertiesAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + for (OWLObjectPropertyExpression prop : axiom.getProperties()) { + prop.accept(this); + } + } + + + @Override + public void visit(OWLDisjointUnionAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getOWLClass().accept(this); + for (OWLClassExpression desc : axiom.getClassExpressions()) { + desc.accept(this); + } + } + + + @Override + public void visit(OWLAnnotationAssertionAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getSubject().accept(this); + axiom.getAnnotation().accept(this); + } + + @Override + public void visit(OWLAnnotationPropertyDomainAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getProperty().accept(this); + axiom.getDomain().accept(this); + } + + @Override + public void visit(OWLAnnotationPropertyRangeAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getProperty().accept(this); + axiom.getRange().accept(this); + } + + @Override + public void visit(OWLSubAnnotationPropertyOfAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getSubProperty().accept(this); + axiom.getSuperProperty().accept(this); + } + + @Override + public void visit(OWLAnnotation node) { + process(node); + annotation = node; + node.getProperty().accept(this); + node.getValue().accept(this); + } + + @Override + public void visit(OWLEquivalentClassesAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + for (OWLClassExpression desc : axiom.getClassExpressions()) { + desc.accept(this); + } + } + + + @Override + public void visit(OWLEquivalentDataPropertiesAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + for (OWLDataPropertyExpression prop : axiom.getProperties()) { + prop.accept(this); + } + } + + + @Override + public void visit(OWLEquivalentObjectPropertiesAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + for (OWLObjectPropertyExpression prop : axiom.getProperties()) { + prop.accept(this); + } + } + + + @Override + public void visit(OWLFunctionalDataPropertyAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getProperty().accept(this); + } + + + @Override + public void visit(OWLFunctionalObjectPropertyAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getProperty().accept(this); + } + + @Override + public void visit(OWLInverseFunctionalObjectPropertyAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getProperty().accept(this); + } 
+ + + @Override + public void visit(OWLInverseObjectPropertiesAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getFirstProperty().accept(this); + axiom.getSecondProperty().accept(this); + } + + + @Override + public void visit(OWLIrreflexiveObjectPropertyAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getProperty().accept(this); + } + + + @Override + public void visit(OWLNegativeDataPropertyAssertionAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getSubject().accept(this); + axiom.getProperty().accept(this); + axiom.getObject().accept(this); + } + + + @Override + public void visit(OWLNegativeObjectPropertyAssertionAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getSubject().accept(this); + axiom.getProperty().accept(this); + axiom.getObject().accept(this); + } + + + @Override + public void visit(OWLObjectPropertyAssertionAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getSubject().accept(this); + axiom.getProperty().accept(this); + axiom.getObject().accept(this); + } + + + @Override + public void visit(OWLSubPropertyChainOfAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + for (OWLObjectPropertyExpression prop : axiom.getPropertyChain()) { + prop.accept(this); + } + axiom.getSuperProperty().accept(this); + } + + + @Override + public void visit(OWLObjectPropertyDomainAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getDomain().accept(this); + axiom.getProperty().accept(this); + } + + + @Override + public void visit(OWLObjectPropertyRangeAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getProperty().accept(this); + axiom.getRange().accept(this); + } + + + @Override + public void visit(OWLSubObjectPropertyOfAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getSubProperty().accept(this); + axiom.getSuperProperty().accept(this); + } + + + @Override + public void visit(OWLReflexiveObjectPropertyAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getProperty().accept(this); + } + + + @Override + public void visit(OWLSameIndividualAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + for (OWLIndividual ind : axiom.getIndividuals()) { + ind.accept(this); + } + } + + + @Override + public void visit(OWLSubClassOfAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + // -ve polarity + axiom.getSubClass().accept(this); + // +ve polarity + axiom.getSuperClass().accept(this); + } + + + @Override + public void visit(OWLSymmetricObjectPropertyAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getProperty().accept(this); + } + + + @Override + public void visit(OWLTransitiveObjectPropertyAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getProperty().accept(this); + } + + + @Override + public void visit(SWRLRule rule) { + MyOWLOntologyWalker.this.ax = rule; + process(rule); + for (SWRLAtom at : rule.getBody()) { + at.accept(this); + } + for (SWRLAtom at : rule.getHead()) { + at.accept(this); + } + } + + @Override + public void visit(OWLHasKeyAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getClassExpression().accept(this); + for (OWLObjectPropertyExpression prop : axiom.getObjectPropertyExpressions()) { + prop.accept(this); + } + for (OWLDataPropertyExpression prop : 
axiom.getDataPropertyExpressions()) { + prop.accept(this); + } + } + + @Override + public void visit(OWLClass desc) { + pushClassExpression(desc); + process(desc); + desc.getIRI().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLDataAllValuesFrom desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getFiller().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLDataExactCardinality desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getFiller().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLDataMaxCardinality desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getFiller().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLDataMinCardinality desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getFiller().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLDataSomeValuesFrom desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getFiller().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLDataHasValue desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getValue().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLObjectAllValuesFrom desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getFiller().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLObjectComplementOf desc) { + pushClassExpression(desc); + process(desc); + desc.getOperand().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLObjectExactCardinality desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getFiller().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLObjectIntersectionOf desc) { + pushClassExpression(desc); + process(desc); + + for (OWLClassExpression op : desc.getOperands()) { + op.accept(this); + } + popClassExpression(); + } + + + @Override + public void visit(OWLObjectMaxCardinality desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getFiller().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLObjectMinCardinality desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getFiller().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLObjectOneOf desc) { + pushClassExpression(desc); + process(desc); + for (OWLIndividual ind : desc.getIndividuals()) { + ind.accept(this); + } + popClassExpression(); + } + + + @Override + public void visit(OWLObjectHasSelf desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLObjectSomeValuesFrom desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getFiller().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLObjectUnionOf desc) { + pushClassExpression(desc); + process(desc); + for (OWLClassExpression op : desc.getOperands()) { + op.accept(this); + } + popClassExpression(); + } + + + @Override + public void visit(OWLObjectHasValue 
desc) { + pushClassExpression(desc); + process(desc); + desc.getProperty().accept(this); + desc.getValue().accept(this); + popClassExpression(); + } + + + @Override + public void visit(OWLDataComplementOf node) { + pushDataRange(node); + process(node); + node.getDataRange().accept(this); + popDataRange(); + } + + + @Override + public void visit(OWLDataOneOf node) { + pushDataRange(node); + process(node); + for (OWLLiteral con : node.getValues()) { + con.accept(this); + } + popDataRange(); + } + + @Override + public void visit(OWLDataIntersectionOf node) { + pushDataRange(node); + process(node); + for (OWLDataRange rng : node.getOperands()) { + rng.accept(this); + } + popDataRange(); + } + + @Override + public void visit(OWLDataUnionOf node) { + pushDataRange(node); + process(node); + for (OWLDataRange rng : node.getOperands()) { + rng.accept(this); + } + popDataRange(); + } + + @Override + public void visit(OWLFacetRestriction node) { + process(node); + node.getFacetValue().accept(this); + } + + + @Override + public void visit(OWLDatatypeRestriction node) { + pushDataRange(node); + process(node); + node.getDatatype().accept(this); + for (OWLFacetRestriction fr : node.getFacetRestrictions()) { + fr.accept(this); + } + popDataRange(); + } + + + @Override + public void visit(OWLDatatype node) { + pushDataRange(node); + process(node); + popDataRange(); + } + + @Override + public void visit(OWLLiteral node) { + process(node); + node.getDatatype().accept(this); + popDataRange(); + } + + @Override + public void visit(OWLAnnotationProperty property) { + process(property); + property.getIRI().accept(this); + } + + @Override + public void visit(OWLDataProperty property) { + process(property); + property.getIRI().accept(this); + } + + + @Override + public void visit(OWLObjectProperty property) { + process(property); + property.getIRI().accept(this); + } + + + @Override + public void visit(OWLObjectInverseOf property) { + process(property); + property.getInverse().accept(this); + } + + + @Override + public void visit(OWLNamedIndividual individual) { + process(individual); + individual.getIRI().accept(this); + } + + @Override + public void visit(OWLAnonymousIndividual individual) { + process(individual); + } + + @Override + public void visit(SWRLLiteralArgument node) { + process(node); + node.getLiteral().accept(this); + } + + + @Override + public void visit(SWRLVariable node) { + process(node); + } + + + @Override + public void visit(SWRLIndividualArgument node) { + process(node); + node.getIndividual().accept(this); + } + + + @Override + public void visit(SWRLBuiltInAtom node) { + process(node); + for (SWRLDArgument at : node.getArguments()) { + at.accept(this); + } + } + + + @Override + public void visit(SWRLClassAtom node) { + process(node); + node.getArgument().accept(this); + node.getPredicate().accept(this); + } + + + @Override + public void visit(SWRLDataRangeAtom node) { + process(node); + node.getArgument().accept(this); + node.getPredicate().accept(this); + } + + + @Override + public void visit(SWRLDataPropertyAtom node) { + process(node); + node.getPredicate().accept(this); + node.getFirstArgument().accept(this); + node.getSecondArgument().accept(this); + } + + + @Override + public void visit(SWRLDifferentIndividualsAtom node) { + process(node); + node.getFirstArgument().accept(this); + node.getSecondArgument().accept(this); + } + + + @Override + public void visit(SWRLObjectPropertyAtom node) { + process(node); + node.getPredicate().accept(this); + node.getFirstArgument().accept(this); + 
node.getSecondArgument().accept(this); + } + + + @Override + public void visit(SWRLSameIndividualAtom node) { + process(node); + node.getFirstArgument().accept(this); + node.getSecondArgument().accept(this); + } + + + @Override + public void visit(OWLDatatypeDefinitionAxiom axiom) { + MyOWLOntologyWalker.this.ax = axiom; + process(axiom); + axiom.getDatatype().accept(this); + axiom.getDataRange().accept(this); + } + } +} + -- cgit v1.2.3
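
Usage note (not part of the patch): a minimal sketch of how the newly added ELHOProfile class could be exercised, assuming OWLAPI 5.x is on the classpath and using a hypothetical ontology file path ("input.owl"). Per the code above, getFragment copies the imports closure into a fresh ontology, drops DIFFERENT_INDIVIDUALS axioms, and removes every axiom flagged by the ELHO profile check.

    import java.io.File;
    import org.semanticweb.karma2.profile.ELHOProfile;
    import org.semanticweb.owlapi.apibinding.OWLManager;
    import org.semanticweb.owlapi.model.OWLOntology;
    import org.semanticweb.owlapi.model.OWLOntologyManager;

    public class ELHOProfileDemo {
        public static void main(String[] args) throws Exception {
            // Load an input ontology; "input.owl" is a placeholder path.
            OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
            OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File("input.owl"));

            // Extract the ELHO fragment: axioms violating the (relaxed) OWL 2 EL
            // checks implemented in ELHOProfile are removed from a copy.
            OWLOntology elho = new ELHOProfile().getFragment(ontology);
            System.out.println("ELHO fragment axiom count: " + elho.getAxiomCount());
        }
    }
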