From 9ce65c5a963b03ee97fe9cb6c5aa65a3c04a80a8 Mon Sep 17 00:00:00 2001 From: yzhou Date: Tue, 21 Apr 2015 10:34:27 +0100 Subject: initial version --- .../cs/pagoda/reasoner/light/BasicQueryEngine.java | 366 +++++++++++++++++++++ .../reasoner/light/DLPredicateComparator.java | 24 ++ .../ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java | 95 ++++++ .../cs/pagoda/reasoner/light/KarmaQueryEngine.java | 98 ++++++ .../pagoda/reasoner/light/RDFoxAnswerTuples.java | 100 ++++++ .../cs/pagoda/reasoner/light/RDFoxQueryEngine.java | 110 +++++++ .../pagoda/reasoner/light/RDFoxTripleManager.java | 249 ++++++++++++++ 7 files changed, 1042 insertions(+) create mode 100644 src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java create mode 100644 src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java create mode 100644 src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java create mode 100644 src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java create mode 100644 src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java create mode 100644 src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java create mode 100644 src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java (limited to 'src/uk/ac/ox/cs/pagoda/reasoner/light') diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java new file mode 100644 index 0000000..3207ff1 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java @@ -0,0 +1,366 @@ +package uk.ac.ox.cs.pagoda.reasoner.light; + +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; + +import org.semanticweb.HermiT.model.DLClause; + +import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; +import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.query.GapByStore4ID; +import uk.ac.ox.cs.pagoda.rules.DatalogProgram; +import uk.ac.ox.cs.pagoda.rules.Program; +import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; +import uk.ac.ox.cs.pagoda.util.Namespace; +import uk.ac.ox.cs.pagoda.util.Timer; +import uk.ac.ox.cs.pagoda.util.UFS; +import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.JRDFox.JRDFStoreException; +import uk.ac.ox.cs.JRDFox.store.DataStore; +import uk.ac.ox.cs.JRDFox.store.Parameters; +import uk.ac.ox.cs.JRDFox.store.TripleStatus; +import uk.ac.ox.cs.JRDFox.store.TupleIterator; +import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType; + +public class BasicQueryEngine extends RDFoxQueryEngine { + + protected DataStore store; + protected Parameters parameters = new Parameters(); + + public BasicQueryEngine(String name) { + super(name); + store = RDFoxQueryEngine.createDataStore(); + parameters.m_allAnswersInRoot = true; + parameters.m_useBushy = true; + } + + public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) { + if (gap != null) { + materialise("lower program", dProgram.getLower().toString()); + String program = dProgram.getUpper().toString(); + try { + gap.compile(program); + gap.addBackTo(); + getDataStore().clearRulesAndMakeFactsExplicit(); + } catch (JRDFStoreException e) { + e.printStackTrace(); + gap.clear(); + } finally { + } + } + else + materialise("upper program", dProgram.getUpper().toString()); + } + + public int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap) { + if (gap != null) { + materialise("lower program", dProgram.getLower().toString()); + String program = dProgram.getUpper().toString(); + try { + 
gap.compile(program); + gap.addBackTo(); + getDataStore().clearRulesAndMakeFactsExplicit(); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } finally { + gap.clear(); + } + } + else + materialise("upper program", dProgram.getUpper().toString()); + + return 1; + } + + @Override + public AnswerTuples evaluate(String queryText) { + return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0]); + } + + @Override + public AnswerTuples evaluate(String queryText, String[] answerVars) { + TupleIterator tupleIterator; + try { + tupleIterator = store.compileQuery(queryText.replace("_:", "?"), prefixes, parameters); + } catch (JRDFStoreException e) { + e.printStackTrace(); + return null; + } + return new RDFoxAnswerTuples(answerVars, tupleIterator); + } + + @Override + public DataStore getDataStore() { + return store; + } + + @Override + public void dispose() { + store.dispose(); + } + + protected void outputClassAssertions(String filename) { + TupleIterator allTuples = null; + boolean redirect = false; + try { + allTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Z }", prefixes, parameters); + redirect = Utility.redirectCurrentOut(filename); + for (long multi = allTuples.open(); multi != 0; multi = allTuples.getNext()) + System.out.println(RDFoxTripleManager.getQuotedTerm(allTuples.getResource(0)) + " " + RDFoxTripleManager.getQuotedTerm(allTuples.getResource(1))); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } finally { + if (redirect) Utility.closeCurrentOut(); + if (allTuples != null) allTuples.dispose(); + } + } + + public void outputInstance4BinaryPredicate(String iri, String filename) { + Utility.redirectCurrentOut(filename); + outputInstance4BinaryPredicate(iri); + Utility.closeCurrentOut(); + } + + public void outputInstance4BinaryPredicate(String iri) { + outputAnswers("select ?x ?y where { ?x <" + iri + "> ?y . 
}"); + } + + public void outputInstanceNumbers(String filename) { + TupleIterator predicateTuples = null; + TupleIterator instanceTuples; + Set number = new HashSet(); + String predicate; + try { + predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Y }", prefixes, parameters); + for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { + predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); + instanceTuples = null; + try { + instanceTuples = getDataStore().compileQuery("SELECT ?X WHERE { ?X <" + Namespace.RDF_TYPE + "> " + predicate + " }", prefixes, parameters); + long totalCount = 0; + for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) { + totalCount += instanceTuples.getMultiplicity(); + } + number.add(predicate + " * " + totalCount); + } finally { + if (instanceTuples != null) instanceTuples.dispose(); + } + } + + predicateTuples.dispose(); + + predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X ?Y ?Z }", prefixes, parameters); + for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { + predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); + instanceTuples = null; + try { + instanceTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X " + predicate + " ?Z }", prefixes, parameters); + ; + long totalCount = 0; + for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) + totalCount += instanceTuples.getMultiplicity(); + number.add(predicate + " * " + totalCount); + } finally { + if (instanceTuples != null) instanceTuples.dispose(); + } + } + + } catch (JRDFStoreException e) { + e.printStackTrace(); + } finally { + if (predicateTuples != null) predicateTuples.dispose(); + } + + Utility.redirectCurrentOut(filename); + String[] ordered = number.toArray(new String[0]); + Arrays.sort(ordered, new DLPredicateComparator()); + for (String line: ordered) System.out.println(line); + Utility.closeCurrentOut(); + + } + + public TupleIterator internal_evaluateAgainstIDBs(String queryText) throws JRDFStoreException { + TupleIterator iter = store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB); + iter.open(); + return iter; + } + + public TupleIterator internal_evaluate(String queryText) throws JRDFStoreException { + TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); + iter.open(); + return iter; + } + + public void setExpandEquality(boolean flag) { + parameters.m_expandEquality = flag; + } + + public TupleIterator internal_evaluateNotExpanded(String queryText) throws JRDFStoreException { + parameters.m_expandEquality = false; + TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); + iter.open(); + parameters.m_expandEquality = true; + return iter; + } + + + public TupleIterator internal_evaluate(String queryText, boolean incrementally) throws JRDFStoreException { + return incrementally ? 
internal_evaluateAgainstIDBs(queryText) : internal_evaluate(queryText); + } + + Set materialisedRules = new HashSet(); + + public String getUnusedRules(Collection clauses, boolean toUpdate) { + DLClause clause; + for (Iterator iter = clauses.iterator(); iter.hasNext(); ) { + if (materialisedRules.contains(clause = iter.next())) + iter.remove(); + else if (toUpdate) materialisedRules.add(clause); + } + + if (clauses.isEmpty()) return null; + + return Program.toString(clauses); + } + + public void outputMaterialisedRules() { + System.out.println(DLClauseHelper.toString(materialisedRules)); + } + + public void outputAnswers(String query) { + TupleIterator iter = null; + try { + iter = internal_evaluate(query); + System.out.println(query); + int arity = iter.getArity(); + for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { + for (int i = 0; i < arity; ++i) + System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); + System.out.println(); + } + } catch (JRDFStoreException e) { + e.printStackTrace(); + } finally { + if (iter != null) iter.dispose(); + } + } + + public void outputInstance4UnaryPredicate(String iri) { + outputAnswers("select ?x where { ?x " + + " <" + + iri + + "> .}"); + } + + public void outputSubjects(String p, String o) { + outputAnswers("select ?x where { ?x <" + p + "> <" + o + "> . }"); + } + + public void outputObjects(String s, String p) { + outputAnswers("select ?x where { <" + s + "> <" + p + "> ?x . }"); + } + + public void outputIDBFacts() { + TupleIterator iter = null; + try { + iter = internal_evaluateAgainstIDBs("select distinct ?x ?y ?z where { ?x ?y ?z }"); + for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { + for (int i = 0; i < 3; ++i) + System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); + System.out.println(); + } + } catch (JRDFStoreException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } finally { + if (iter != null) iter.dispose(); + } + + } + + public void outputType4Individual(String iri) { + outputAnswers("select ?z where { <" + iri + "> " + Namespace.RDF_TYPE_QUOTED + " ?z }"); + } + + public int getSameAsNumber() { + TupleIterator iter = null; + int counter = 0; + try { + iter = internal_evaluate("select ?x ?y where {?x " + Namespace.EQUALITY_QUOTED + " ?y . }"); + for (long multi = iter.open(); multi != 0; multi = iter.getNext()) + if (iter.getResourceID(0) != iter.getResourceID(1)) + ++counter; + } catch (JRDFStoreException e) { + e.printStackTrace(); + } finally { + if (iter != null) iter.dispose(); + } + return counter; + } + + private UFS equalityGroups = null; + + public UFS getEqualityGroups() { + if (equalityGroups != null) return equalityGroups; + + equalityGroups = new UFS(); + + TupleIterator answers = null; + try { + Timer t = new Timer(); + answers = internal_evaluate("select ?x ?z where {?x " + Namespace.EQUALITY_QUOTED + "?z . 
}"); + for (long multi = answers.open(); multi != 0; multi = answers.getNext()) { + if (answers.getResourceID(0) != answers.getResourceID(1)) + equalityGroups.merge(answers.getResource(0).m_lexicalForm, answers.getResource(1).m_lexicalForm); + } + Utility.logInfo("@Time to group individuals by equality: " + t.duration()); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } finally { + if (answers != null) answers.dispose(); + } + + return equalityGroups; + } + + public void clearRulesAndIDBFacts(Collection collection) { +// performDeletion(collection); + collection.clear(); + try { + store.clearRulesAndMakeFactsExplicit(); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + } + + @SuppressWarnings("unused") + private void performDeletion(Collection collection) { + Utility.logInfo("Remove all rules, idb facts and added staff..."); + Timer timer = new Timer(); + TupleIterator iter = null; + try { + UpdateType ut = UpdateType.ScheduleForDeletion; + for (int[] t: collection) + store.addTriplesByResourceIDs(t, ut); + + iter = internal_evaluateAgainstIDBs("select ?x ?y ?z where { ?x ?y ?z . }"); + for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { + int[] triple = new int[3]; + for (int i = 0; i < 3; ++i) + triple[i] = iter.getResourceID(i); + store.addTriplesByResourceIDs(triple, ut); + } + store.applyReasoning(true); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } finally { + if (iter != null) iter.dispose(); + } + Utility.logInfo("Time for deletion: " + timer.duration()); + } + + +} diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java new file mode 100644 index 0000000..c22902c --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java @@ -0,0 +1,24 @@ +package uk.ac.ox.cs.pagoda.reasoner.light; + +import java.util.Comparator; + +import uk.ac.ox.cs.pagoda.multistage.Normalisation; +import uk.ac.ox.cs.pagoda.rules.OverApproxExist; + +public class DLPredicateComparator implements Comparator { + + @Override + public int compare(String arg0, String arg1) { + int ret = type(arg0) - type(arg1); + if (ret != 0) return ret; + + return arg0.compareTo(arg1); + } + + private int type(String p) { + if (p.contains(OverApproxExist.negativeSuffix)) return 1; + if (p.contains(Normalisation.auxiliaryConceptPrefix)) return 2; + else return 0; + } + +} diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java new file mode 100644 index 0000000..03d2b67 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQuery.java @@ -0,0 +1,95 @@ +package uk.ac.ox.cs.pagoda.reasoner.light; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.LinkedList; +import java.util.Map.Entry; + +import org.semanticweb.HermiT.model.DLClause; +import org.semanticweb.karma2.exception.IllegalInputQueryException; +import org.semanticweb.karma2.model.ConjunctiveQuery; +import org.semanticweb.karma2.model.cqparser.ConjunctiveQueryParser; +import uk.ac.ox.cs.pagoda.MyPrefixes; +import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; +import uk.ac.ox.cs.pagoda.hermit.RuleHelper; +import uk.ac.ox.cs.pagoda.util.Utility; + +public class KarmaQuery { + + StringBuffer queryBuffer; + + public KarmaQuery(String queryText) { + LinkedList answerVariables = new LinkedList(); + DLClause clause = DLClauseHelper.getQuery(queryText, answerVariables); + String clauseText = 
RuleHelper.getText(clause); +// clauseText = RuleHelper.abbreviateIRI(clauseText).replace(":-", "<-"); + clauseText = clauseText.replace(":-", "<-"); + queryBuffer = new StringBuffer(); + + clauseText = expandIRI4Arguments(clauseText); + + for (Entry entry : MyPrefixes.PAGOdAPrefixes.getPrefixIRIsByPrefixName().entrySet()) + if (clauseText.contains(entry.getKey())) { + if (queryBuffer.length() > 0) queryBuffer.append(',').append(Utility.LINE_SEPARATOR); + queryBuffer.append("prefix ").append(entry.getKey()).append(" <").append(entry.getValue()).append(">"); + } + if (queryBuffer.length() > 0) queryBuffer.append(Utility.LINE_SEPARATOR); + + queryBuffer.append("p("); + boolean first = true; + for (String var: answerVariables) { + if (first) first = false; + else queryBuffer.append(","); + + queryBuffer.append("?").append(var); + } + queryBuffer.append(")").append(clauseText.substring(0, clauseText.length() - 1)); + } + + private String expandIRI4Arguments(String clauseText) { + int leftIndex = clauseText.indexOf('('), rightIndex = clauseText.indexOf(')', leftIndex + 1); + String argsText, newArgsText; + while (leftIndex != -1) { + argsText = clauseText.substring(leftIndex + 1, rightIndex); + newArgsText = MyPrefixes.PAGOdAPrefixes.expandText(argsText); + clauseText = clauseText.replace(argsText, newArgsText); + + rightIndex += newArgsText.length() - argsText.length(); + leftIndex = clauseText.indexOf('(', rightIndex + 1); + rightIndex = clauseText.indexOf(')', leftIndex + 1); + } + + return clauseText; + } + + public ConjunctiveQuery getConjunctiveQuery() { + ConjunctiveQuery cq = null; + try { + cq = new ConjunctiveQueryParser(toString()).parse(); + } catch (FileNotFoundException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IllegalInputQueryException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + Utility.logDebug("The query cannot be properly handled by KARMA."); + return null; + } + return cq; + } + + @Override + public String toString() { + return queryBuffer.toString(); + } + + static String sample = "prefix P0: , " + + "prefix rdf: , " + + "prefix owl: " + + "q(?0) <- owl:Thing(?0), P0:Person(?0)"; + +} diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java new file mode 100644 index 0000000..f70dde9 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java @@ -0,0 +1,98 @@ +package uk.ac.ox.cs.pagoda.reasoner.light; + +import java.io.File; +import java.io.FileNotFoundException; +import java.util.*; + +import org.semanticweb.karma2.*; +import org.semanticweb.karma2.clausifier.OntologyProcesser; +import org.semanticweb.karma2.exception.IllegalInputOntologyException; +import org.semanticweb.karma2.model.ConjunctiveQuery; +import org.semanticweb.owlapi.model.OWLOntology; + +import uk.ac.ox.cs.pagoda.query.*; +import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; +import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.JRDFox.JRDFStoreException; +import uk.ac.ox.cs.JRDFox.store.DataStore; + +public class KarmaQueryEngine extends RDFoxQueryEngine { + + private MyKarma reasoner = null; + + String karmaDataFile = null, karmaRuleFile = null; + + public KarmaQueryEngine(String name) { + super(name); + +// int Base = 1 << 6; +// int index = (new Random().nextInt() % Base + Base) % Base; +// karmaDataFile = "karma_data" + index + 
".ttl"; +// karmaRuleFile = "karma_rule" + index + ".dlog"; + karmaDataFile = Utility.TempDirectory + "karma_data.ttl"; + karmaRuleFile = Utility.TempDirectory + "karma_rule.dlog"; + + reasoner = new MyKarma(); + } + + public MyKarma getReasoner() { + return reasoner; + } + + public void processOntology(OWLOntology elhoOntology) { + try { + OntologyProcesser.transformOntology(elhoOntology, new File(karmaDataFile), new File(karmaRuleFile)); + } catch (IllegalInputOntologyException e) { + e.printStackTrace(); + } + } + + @Override + public void dispose() { + reasoner.dispose(); + } + + @Override + public AnswerTuples evaluate(String queryText) { + return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], null); + } + + @Override + public AnswerTuples evaluate(String queryText, String[] answerVars) { + return evaluate(queryText, answerVars, null); + } + + public AnswerTuples evaluate(String queryText, AnswerTuples soundAnswerTuples) { + return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0], soundAnswerTuples); + } + + public AnswerTuples evaluate(String queryText, String[] answerVars, AnswerTuples soundAnswerTuples) { + KarmaQuery karmaQuery = new KarmaQuery(queryText.replace("_:", "?")); + reasoner.setConcurrence(false); + ConjunctiveQuery cq = karmaQuery.getConjunctiveQuery(); + if (cq == null) return null; + Set answers = reasoner.answerCQ(cq, soundAnswerTuples, !queryText.contains("_:")); + return new AnswerTuplesImp(answerVars, answers); + } + + @Override + public DataStore getDataStore() { + return reasoner.getStore(); + } + + public void initialiseKarma() { + try { + reasoner.initializeData(new File(karmaDataFile)); + reasoner.materialise(new File(karmaRuleFile)); + + File tmp; + if (karmaDataFile != null && ((tmp = new File(karmaDataFile)).exists())) tmp.delete(); + if (karmaRuleFile != null && ((tmp = new File(karmaRuleFile)).exists())) tmp.delete(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + } + +} diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java new file mode 100644 index 0000000..dd71809 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxAnswerTuples.java @@ -0,0 +1,100 @@ +package uk.ac.ox.cs.pagoda.reasoner.light; + +import org.semanticweb.HermiT.model.Constant; +import org.semanticweb.HermiT.model.Individual; +import org.semanticweb.HermiT.model.Term; + +import uk.ac.ox.cs.pagoda.query.AnswerTuple; +import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.JRDFox.JRDFStoreException; +import uk.ac.ox.cs.JRDFox.model.GroundTerm; +import uk.ac.ox.cs.JRDFox.store.TupleIterator; + +public class RDFoxAnswerTuples implements AnswerTuples { + + long multi; + TupleIterator m_iter; + String[] m_answerVars; + + public RDFoxAnswerTuples(String[] answerVars, TupleIterator iter) { + m_answerVars = answerVars; + m_iter = iter; + reset(); + } + + @Override + public boolean isValid() { + return multi != 0; + } + + @Override + public int getArity() { + try { + return m_iter.getArity(); + } catch (JRDFStoreException e) { + e.printStackTrace(); + return -1; + } + } + + @Override + public void moveNext() { + try { + multi = m_iter.getNext(); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + } + + @Override + public void dispose() { + m_iter.dispose(); + } + + protected void finalize() { + 
m_iter.dispose(); + } + + @Override + public AnswerTuple getTuple() { + return new AnswerTuple(m_iter, m_answerVars.length); + } + + @Override + public void reset() { + try { + multi = m_iter.open(); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + } + + @Override + public boolean contains(AnswerTuple t) { + Utility.logError("Unsupported operation in RDFoxAnswerTuples"); + return false; + } + + @Override + public void remove() { + Utility.logError("Unsupported operation in RDFoxAnswerTuples"); + } + + @Override + public String[] getAnswerVariables() { + return m_answerVars; + } + + public static Term getHermitTerm(GroundTerm t) { + if (t instanceof uk.ac.ox.cs.JRDFox.model.Individual) { + uk.ac.ox.cs.JRDFox.model.Individual individual = (uk.ac.ox.cs.JRDFox.model.Individual) t; + return Individual.create(individual.getIRI()); + } + else { + uk.ac.ox.cs.JRDFox.model.Literal literal = ((uk.ac.ox.cs.JRDFox.model.Literal) t); + return Constant.create(literal.getLexicalForm(), literal.getDatatype().getIRI()); + } + } + +} diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java new file mode 100644 index 0000000..30771ab --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java @@ -0,0 +1,110 @@ +package uk.ac.ox.cs.pagoda.reasoner.light; + +import java.io.File; +import java.util.Collection; + +import uk.ac.ox.cs.pagoda.MyPrefixes; +import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.reasoner.QueryEngine; +import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; +import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter; +import uk.ac.ox.cs.pagoda.util.Timer; +import uk.ac.ox.cs.pagoda.util.Utility; +import uk.ac.ox.cs.JRDFox.JRDFStoreException; +import uk.ac.ox.cs.JRDFox.Prefixes; +import uk.ac.ox.cs.JRDFox.store.DataStore; +import uk.ac.ox.cs.JRDFox.store.DataStore.StoreType; + +public abstract class RDFoxQueryEngine implements QueryEngine { + + public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2; + + protected String name; + protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes(); + + public RDFoxQueryEngine(String name) { + this.name = name; + } + + public abstract DataStore getDataStore(); + + public abstract void dispose(); + + public void importRDFData(String fileName, String importedFile) { + if (importedFile == null || importedFile.isEmpty()) return ; + Timer t = new Timer(); + DataStore store = getDataStore(); + try { + long oldTripleCount = store.getTriplesCount(), tripleCount; + for (String file: importedFile.split(QueryReasoner.ImportDataFileSeparator)) + store.importTurtleFile(new File(file), prefixes); + tripleCount = store.getTriplesCount(); + Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); + store.clearRulesAndMakeFactsExplicit(); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds."); + } + + public void materialise(String programName, String programText) { + if (programText == null) return ; + Timer t = new Timer(); + DataStore store = getDataStore(); + try { + long oldTripleCount = store.getTriplesCount(), tripleCount; +// store.addRules(new String[] {programText}); + store.importRules(programText); + store.applyReasoning(); + tripleCount = store.getTriplesCount(); + Utility.logDebug(name + " store after 
materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); + store.clearRulesAndMakeFactsExplicit(); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + Utility.logDebug(name + " store finished the materialisation of " + programName + " in " + t.duration() + " seconds."); + } + + @Override + public void evaluate(Collection queryTexts, String answerFile) { + if (queryTexts == null) + return ; + + int queryID = 0; + AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile); + AnswerTuples answerTuples; + Timer t = new Timer(); + try { + for (String query: queryTexts) { + t.reset(); + answerTuples = null; + try { + answerTuples = evaluate(query); + Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration()); + answerWriter.write(answerTuples.getAnswerVariables(), answerTuples); + } finally { + if (answerTuples != null) answerTuples.dispose(); + } + } + } finally { + answerWriter.close(); + } + + Utility.logDebug("done computing query answers by RDFox."); + + } + + public static DataStore createDataStore() { + DataStore instance = null; + try { +// instance = new DataStore("par-head-n"); + instance = new DataStore(StoreType.NarrowParallelHead); + instance.setNumberOfThreads(matNoOfThreads); + instance.initialize(); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + return instance; + } + +} diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java new file mode 100644 index 0000000..2280b12 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java @@ -0,0 +1,249 @@ +package uk.ac.ox.cs.pagoda.reasoner.light; + +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Map; +import java.util.Queue; +import java.util.Set; + +import org.semanticweb.HermiT.model.AnnotatedEquality; +import org.semanticweb.HermiT.model.Atom; +import org.semanticweb.HermiT.model.AtomicConcept; +import org.semanticweb.HermiT.model.AtomicRole; +import org.semanticweb.HermiT.model.Constant; +import org.semanticweb.HermiT.model.DLPredicate; +import org.semanticweb.HermiT.model.Equality; +import org.semanticweb.HermiT.model.Individual; +import org.semanticweb.HermiT.model.Inequality; +import org.semanticweb.HermiT.model.Term; +import org.semanticweb.HermiT.model.Variable; + +import uk.ac.ox.cs.pagoda.owl.OWLHelper; +import uk.ac.ox.cs.pagoda.util.Namespace; +import uk.ac.ox.cs.JRDFox.JRDFStoreException; +import uk.ac.ox.cs.JRDFox.model.GroundTerm; +import uk.ac.ox.cs.JRDFox.store.DataStore; +import uk.ac.ox.cs.JRDFox.model.Datatype; +import uk.ac.ox.cs.JRDFox.store.Dictionary; +import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType; +import uk.ac.ox.cs.JRDFox.store.Resource; + +public class RDFoxTripleManager { + + UpdateType m_incrementally; +// boolean m_incrementally; + + DataStore m_store; + Dictionary m_dict; + Set triplesByTerm = new HashSet(); + + public RDFoxTripleManager(DataStore store, boolean incrementally) { + m_store = store; +// m_incrementally = incrementally; + if (incrementally) + m_incrementally = UpdateType.ScheduleForAddition; + else + m_incrementally = UpdateType.Add; + + try { + m_dict = store.getDictionary(); + resourceID = m_dict.resolveResources( + new String[] {Namespace.RDF_TYPE, Namespace.EQUALITY, Namespace.INEQUALITY}, + new int[] {Datatype.IRI_REFERENCE.value(), Datatype.IRI_REFERENCE.value(), 
Datatype.IRI_REFERENCE.value()} + ); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + } + + public boolean isRdfTypeID(int id) { + return id == resourceID[0]; + } + + public void addTripleByID(int[] tuple) { + try { + m_store.addTriplesByResourceIDs(tuple, m_incrementally); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + } + + public void addTripleByTerm(Atom atom) { + try { + m_store.addTriples(getRDFoxTriple(atom), m_incrementally); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + } + + public static GroundTerm[] getRDFoxTriple(Atom instance) { + if (instance.getArity() == 1) + return new GroundTerm[] { + uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()), + uk.ac.ox.cs.JRDFox.model.Individual.RDF_TYPE, + uk.ac.ox.cs.JRDFox.model.Individual.create(((AtomicConcept) instance.getDLPredicate()).getIRI()) }; + else if (instance.getDLPredicate() instanceof Equality || instance.getDLPredicate() instanceof AnnotatedEquality) + return new GroundTerm[] { + uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()), + uk.ac.ox.cs.JRDFox.model.Individual.SAME_AS, + uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) }; + else if (instance.getDLPredicate() instanceof Inequality) + return new GroundTerm[] { + uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()), + uk.ac.ox.cs.JRDFox.model.Individual.DIFFERENT_FROM, + uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) }; + else + return new GroundTerm[] { + uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(0)).getIRI()), + uk.ac.ox.cs.JRDFox.model.Individual.create(((AtomicRole) instance.getDLPredicate()).getIRI()), + uk.ac.ox.cs.JRDFox.model.Individual.create(((Individual) instance.getArgument(1)).getIRI()) }; + } + + int[] resourceID; // rdf:type, owl:sameAs, owl:differentFrom + + public int[] getInstance(Atom atom, Map assignment) { + DLPredicate p = atom.getDLPredicate(); + if (p instanceof Equality || p instanceof AnnotatedEquality) + return new int[] { + getResourceID(atom.getArgument(0), assignment), + resourceID[1], + getResourceID(atom.getArgument(1), assignment) + }; + else if (p instanceof Inequality) + return new int[] { + getResourceID(atom.getArgument(0), assignment), + resourceID[2], + getResourceID(atom.getArgument(1), assignment) + }; + else if (atom.getArity() == 1) + return new int[] { + getResourceID(atom.getArgument(0), assignment), + resourceID[0], + getResourceID(p) + }; + else + return new int[] { + getResourceID(atom.getArgument(0), assignment), + getResourceID(p), + getResourceID(atom.getArgument(1), assignment) + }; + } + + public String getRawTerm(int id) { + Resource[] res = new Resource[1]; + try { + m_dict.getResources(new int[] {id}, 0, 1, res); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + return getQuotedTerm(res[0]); + } + + Map predicateCache = new HashMap(); + + public int getResourceID(DLPredicate p) { + Integer id; + String name = p instanceof AtomicConcept ? 
((AtomicConcept) p).getIRI() : ((AtomicRole) p).getIRI(); + if ((id = predicateCache.get(name)) != null) return id; + try { + predicateCache.put(name, id = resolveResource(name, Datatype.IRI_REFERENCE.value())); + + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + return id; + } + + public int getResourceID(String name) { + Integer id = null; + try { + id = resolveResource(name, Datatype.IRI_REFERENCE.value()); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + return id; + } + + private int resolveResource(String name, int type) throws JRDFStoreException { + String[] lexicalForms = new String[] {name}; + int[] types = new int[] {type}; + return m_dict.resolveResources(lexicalForms, types)[0]; + } + + Map termCache = new HashMap(); + Queue termList = new LinkedList(); + int sizeLimit = 10000; + + private int getResourceID(Term arg, Map assignment) { + while (termCache.size() > sizeLimit) + termCache.remove(termList.poll()); + + if (arg instanceof Variable) return assignment.get((Variable) arg); + Integer id = null; + if ((id = termCache.get(arg)) != null) + return id; + +// if (arg instanceof Individual) { + try { + if (arg instanceof Individual) + termCache.put(arg, id = resolveResource(((Individual) arg).getIRI(), Datatype.IRI_REFERENCE.value())); + else if (arg instanceof Constant) + termCache.put(arg, id = resolveResource(((Constant) arg).getLexicalForm(), getDatatypeID(((Constant) arg).getDatatypeURI()))); + + } catch (JRDFStoreException e) { + e.printStackTrace(); + } +// } + + return id; + } + + private static int getDatatypeID(String uri) { + if (uri.equals("http://www.w3.org/2001/XMLSchema#string")) return Datatype.XSD_STRING.value(); + if (uri.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#PlainLiteral")) return Datatype.RDF_PLAIN_LITERAL.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#integer")) return Datatype.XSD_INTEGER.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#float")) return Datatype.XSD_FLOAT.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#double")) return Datatype.XSD_DOUBLE.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#boolean")) return Datatype.XSD_BOOLEAN.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#dateTime")) return Datatype.XSD_DATE_TIME.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#time")) return Datatype.XSD_TIME.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#date")) return Datatype.XSD_DATE.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#gYearMonth")) return Datatype.XSD_G_YEAR_MONTH.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#gYear")) return Datatype.XSD_G_YEAR.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#gMonthDay")) return Datatype.XSD_G_MONTH_DAY.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#gDay")) return Datatype.XSD_G_DAY.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#gMonth")) return Datatype.XSD_G_MONTH.value(); + if (uri.equals("http://www.w3.org/2001/XMLSchema#duration")) return Datatype.XSD_DURATION.value(); + + return -1; + } + + public int[] getResourceIDs(Collection individuals) { + String[] str = new String[individuals.size()]; + int[] types = new int[individuals.size()]; + int index = 0; + for (uk.ac.ox.cs.JRDFox.model.Individual individual : individuals) { + types[index] = Datatype.IRI_REFERENCE.value(); + str[index++] = individual.getIRI(); + } + + try { + return m_dict.resolveResources(str, types); + } catch (JRDFStoreException e) { + 
e.printStackTrace(); + return null; + } + } + + public static String getQuotedTerm(Resource r) { + if (r.m_datatype.equals(Datatype.IRI_REFERENCE)) + return OWLHelper.addAngles(r.m_lexicalForm); + if (r.m_datatype.equals(Datatype.XSD_STRING) || r.m_datatype.equals(Datatype.RDF_PLAIN_LITERAL)) + return "\"" + r.m_lexicalForm + "\""; + else + return "\"" + r.m_lexicalForm + "\"^^<" + r.m_datatype.getIRI() + ">"; + } + +} -- cgit v1.2.3
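
The commit adds the query-engine classes but no example of how they fit together. Below is a minimal sketch (not part of the patch) of driving the new BasicQueryEngine, using only method signatures that appear in this commit; the store name, Turtle file path and predicate IRI are hypothetical placeholders, and the rule text would normally come from a DatalogProgram as in materialiseRestrictedly.

import uk.ac.ox.cs.pagoda.query.AnswerTuples;
import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;

public class BasicQueryEngineSketch {
    public static void main(String[] args) {
        // "demo" is only a label used in log output; the constructor also creates the underlying RDFox DataStore.
        BasicQueryEngine engine = new BasicQueryEngine("demo");
        try {
            // Import a (hypothetical) Turtle file; the first argument is only used in log messages.
            engine.importRDFData("abox", "/tmp/data.ttl");
            // Rules would normally be materialised from a DatalogProgram, e.g.
            // engine.materialise("upper program", dProgram.getUpper().toString());
            AnswerTuples answers = null;
            try {
                // Evaluate a SPARQL query; evaluate() returns null if RDFox fails to compile it.
                answers = engine.evaluate("SELECT ?x WHERE { ?x <http://example.org/p> ?y }");
                if (answers != null) {
                    int count = 0;
                    for (; answers.isValid(); answers.moveNext())
                        count++;
                    System.out.println(count + " answers for ?x");
                }
            } finally {
                if (answers != null) answers.dispose();
            }
        } finally {
            engine.dispose();
        }
    }
}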