From 17bd9beaf7f358a44e5bf36a5855fe6727d506dc Mon Sep 17 00:00:00 2001 From: Federico Igne Date: Tue, 10 May 2022 18:17:06 +0100 Subject: [pagoda] Move project to Scala This commit includes a few changes: - The repository still uses Maven to manage dependencies but it is now a Scala project. - The code has been ported from OWLAPI 3.4.10 to 5.1.20 - A proof-of-concept program using both RSAComb and PAGOdA has been added. --- .../uk/ac/ox/cs/hermit/HermitQueryReasoner.java | 200 +++++++++++++ src/test/java/uk/ac/ox/cs/hermit/HermitTester.java | 55 ++++ src/test/java/uk/ac/ox/cs/hermit/JAIR_HermiT.java | 129 +++++++++ src/test/java/uk/ac/ox/cs/jrdfox/Tester.java | 195 +++++++++++++ .../uk/ac/ox/cs/pagoda/approx/ApproxTester.java | 158 ++++++++++ .../uk/ac/ox/cs/pagoda/approx/ClauseTester.java | 38 +++ .../cs/pagoda/endomorph/DependencyGraphTest.java | 60 ++++ .../uk/ac/ox/cs/pagoda/global_tests/BugTests.java | 240 ++++++++++++++++ .../ox/cs/pagoda/global_tests/CostEvaluation.java | 115 ++++++++ .../ac/ox/cs/pagoda/global_tests/JAIR_PAGOdA.java | 191 ++++++++++++ .../cs/pagoda/global_tests/JAIR_Scalability.java | 91 ++++++ .../ox/cs/pagoda/global_tests/LightEvaluation.java | 18 ++ .../ac/ox/cs/pagoda/global_tests/MadeUpCases.java | 77 +++++ .../cs/pagoda/global_tests/SkolemisationTests.java | 269 +++++++++++++++++ .../pagoda/global_tests/TestGapMappedToLower.java | 68 +++++ .../cs/pagoda/global_tests/TestPagodaDBPedia.java | 33 +++ .../ox/cs/pagoda/global_tests/TestPagodaFLY.java | 107 +++++++ .../ox/cs/pagoda/global_tests/TestPagodaLUBM.java | 92 ++++++ .../ox/cs/pagoda/global_tests/TestPagodaNPD.java | 38 +++ .../cs/pagoda/global_tests/TestPagodaReactome.java | 53 ++++ .../ox/cs/pagoda/global_tests/TestPagodaUOBM.java | 122 ++++++++ .../uk/ac/ox/cs/pagoda/hermit/TestRuleHelper.java | 319 +++++++++++++++++++++ .../uk/ac/ox/cs/pagoda/junit/ClauseTester.java | 183 ++++++++++++ .../uk/ac/ox/cs/pagoda/query/CheckAnswers.java | 51 ++++ 
.../approximators/TestSkolemTermsManager.java | 14 + .../uk/ac/ox/cs/pagoda/summary/SummaryTester.java | 140 +++++++++ .../java/uk/ac/ox/cs/pagoda/tester/OWLTester.java | 30 ++ .../uk/ac/ox/cs/pagoda/tester/PagodaTester.java | 318 ++++++++++++++++++++ .../java/uk/ac/ox/cs/pagoda/tester/Statistics.java | 60 ++++ .../ox/cs/pagoda/util/SimpleProgressBarTester.java | 16 ++ .../java/uk/ac/ox/cs/pagoda/util/TestUtil.java | 97 +++++++ 31 files changed, 3577 insertions(+) create mode 100644 src/test/java/uk/ac/ox/cs/hermit/HermitQueryReasoner.java create mode 100644 src/test/java/uk/ac/ox/cs/hermit/HermitTester.java create mode 100644 src/test/java/uk/ac/ox/cs/hermit/JAIR_HermiT.java create mode 100644 src/test/java/uk/ac/ox/cs/jrdfox/Tester.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/approx/ApproxTester.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/approx/ClauseTester.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/endomorph/DependencyGraphTest.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/BugTests.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/CostEvaluation.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/JAIR_PAGOdA.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/JAIR_Scalability.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/LightEvaluation.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/MadeUpCases.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/SkolemisationTests.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestGapMappedToLower.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaDBPedia.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaFLY.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaLUBM.java create mode 100644 
src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaNPD.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaReactome.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaUOBM.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/hermit/TestRuleHelper.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/junit/ClauseTester.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/query/CheckAnswers.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/rules/approximators/TestSkolemTermsManager.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/summary/SummaryTester.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/tester/OWLTester.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/tester/PagodaTester.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/tester/Statistics.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/util/SimpleProgressBarTester.java create mode 100644 src/test/java/uk/ac/ox/cs/pagoda/util/TestUtil.java (limited to 'src/test/java/uk') diff --git a/src/test/java/uk/ac/ox/cs/hermit/HermitQueryReasoner.java b/src/test/java/uk/ac/ox/cs/hermit/HermitQueryReasoner.java new file mode 100644 index 0000000..957790f --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/hermit/HermitQueryReasoner.java @@ -0,0 +1,200 @@ +package uk.ac.ox.cs.hermit; + +import org.semanticweb.HermiT.Reasoner; +import org.semanticweb.HermiT.model.Atom; +import org.semanticweb.HermiT.model.AtomicRole; +import org.semanticweb.owlapi.model.*; +import org.semanticweb.owlapi.model.parameters.Imports; +import org.semanticweb.owlapi.reasoner.Node; +import uk.ac.ox.cs.pagoda.owl.OWLHelper; +import uk.ac.ox.cs.pagoda.owl.QueryRoller; +import uk.ac.ox.cs.pagoda.query.QueryManager; +import uk.ac.ox.cs.pagoda.query.QueryRecord; +import uk.ac.ox.cs.pagoda.util.Timer; + +import java.io.*; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.*; + +public class HermitQueryReasoner 
{ + + public static void main(String... args) throws FileNotFoundException, OWLOntologyCreationException, OWLOntologyStorageException { + if (args.length == 0) { +// args = new String[] {"/media/krr-nas-share/Yujiao/ontologies/lubm/lubm1_merged.owl", null, PagodaTester.lubm_query}; +// args = new String[] {"/users/yzhou/ontologies/uobm/uobm1_merged.owl", null, "/users/yzhou/ontologies/uobm/queries/standard.sparql"}; +// args = new String[] {"/users/yzhou/ontologies/fly/fly_anatomy_XP_with_GJ_FC_individuals.owl", null, "/users/yzhou/ontologies/fly/queries/fly.sparql"}; +// args = new String[] {"/media/krr-nas-share/Yujiao/ontologies/npd/npd-all-minus-datatype.owl", "/media/krr-nas-share/Yujiao/ontologies/npd/data/npd-data-dump-minus-datatype-new.ttl", "/users/yzhou/ontologies/npd/queries/atomic.sparql"}; +// args = new String[] {"/media/krr-nas-share/Yujiao/ontologies/npd/npd-all.owl", "/media/krr-nas-share/Yujiao/ontologies/npd/data/npd-data-dump-processed.ttl", "/users/yzhou/ontologies/npd/queries/atomic.sparql"}; +// args = new String[] {PagodaTester.dbpedia_tbox, PagodaTester.dbpedia_abox, PagodaTester.dbpedia_query}; +// args = new String[] {"/users/yzhou/ontologies/answersCorrectness/unsatisfiable.owl", null, "/users/yzhou/ontologies/answersCorrectness/unsatisfiable_queries.sparql"}; + +// args = new String[] {"/media/krr-nas-share/Yujiao/ontologies/bio2rdf/chembl/cco-processed-noDPR-noDPD.ttl", "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/chembl/graph sampling/sample_100.nt", "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/chembl/queries/atomic_one_filtered.sparql", "../answersCorrectness-share/results/chembl/hermit_1p"}; + args = + new String[]{"/users/yzhou/temp/uniprot_debug/core-processed-noDis.owl", "/users/yzhou/temp/uniprot_debug/sample_1_removed.nt", "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/uniprot/queries/atomic_one.sparql", "../answersCorrectness-share/results/uniprot/hermit_1p"}; + } +// args = new String[] {"imported.owl", "", 
"/media/krr-nas-share/Yujiao/ontologies/bio2rdf/uniprot/queries/atomic_one.sparql", "../answersCorrectness-share/results/uniprot/hermit_1p"}; } + + + PrintStream ps = args.length < 4 ? null : new PrintStream(new File(args[3])); + for (int i = 0; i < args.length; ++i) { + if (args[i] == null || args[i].equalsIgnoreCase("null")) args[i] = ""; + System.out.println("Argument " + i + ": " + args[i]); + } + +// PrintStream ps = null; // new PrintStream(new File("../answersCorrectness-share/results/reactome/ ")); + if (ps != null) System.setOut(ps); + + Timer t = new Timer(); + OWLOntology onto = OWLHelper.loadOntology(args[0]); + OWLOntologyManager man = onto.getOWLOntologyManager(); + + OWLDatatype date = man.getOWLDataFactory().getOWLDatatype(IRI.create("http://www.w3.org/2001/XMLSchema#date")); + + if (onto.containsEntityInSignature(date)) { + for (OWLOntology o: onto.getImportsClosure()) + for (OWLAxiom axiom: o.getAxioms()) + if (axiom.getDatatypesInSignature().contains(date)) { + System.out.println("The axiom: " + axiom + " is being ingored. 
"); + man.removeAxiom(onto, axiom); + } + man.saveOntology(onto, new FileOutputStream(args[0] = "tbox_hermit.owl")); + man.removeOntology(onto); + onto = OWLHelper.loadOntology(man, args[0]); + System.out.println("TBox processed in " + t.duration()); + } + + try { + onto = OWLHelper.getImportedOntology(onto, args[1]); + } catch (IOException e) { + e.printStackTrace(); + } + System.out.println("Ontology loaded in " + t.duration()); + System.out.println("ABox axioms:" + onto.getABoxAxioms(Imports.INCLUDED).size()); +// for (OWLOntology o: onto.getImportsClosure()) +// for (OWLAxiom axiom: o.getAxioms()) +// System.out.println(axiom); + + Reasoner hermit = new Reasoner(onto); + if (!hermit.isConsistent()) { + System.out.println("The ontology is inconsistent."); + return ; + } + System.out.println("Preprocessing DONE in " + t.duration()); + +// System.out.println(hermit.isConsistent()); + + QueryManager queryManager = new QueryManager(); + QueryRoller roller = new QueryRoller(onto.getOWLOntologyManager().getOWLDataFactory()); + int failedCounter = 0; + Timer total = new Timer(); + for (QueryRecord record: queryManager.collectQueryRecords(args[2])) { + if (Integer.parseInt(record.getQueryID()) < 10) continue; + if (total.duration() > 18000) { + System.out.println("Time out 5h."); + return ; + } + System.out.println("--------------------- Query " + record.getQueryID() + " -----------------------"); + System.out.println(record.getQueryText()); + ExecutorService exec = Executors.newSingleThreadExecutor(); + try { + Future succ = exec.submit(new QueryThread(record, onto, hermit, roller)); + try { + try { + if (record.getQueryID().equals("1")) + System.out.println(succ.get(60, TimeUnit.MINUTES)); + else + System.out.println(succ.get(20, TimeUnit.MINUTES)); + } catch (InterruptedException e) { +// e.printStackTrace(); + } catch (ExecutionException e) { +// e.printStackTrace(); + } catch (TimeoutException e) { +// e.printStackTrace(); + } + } finally { + if (succ.cancel(true)) 
{ + System.out.println("Trying to cancel the current query thread " + (++failedCounter)); + } + } + } finally { + exec.shutdownNow(); + } + } + + if (ps != null) ps.close(); + System.exit(0); + } + +} + +class QueryThread implements Callable { + + QueryRecord record; + OWLOntology onto; + Reasoner hermit; + QueryRoller roller; + + public QueryThread(QueryRecord record2, OWLOntology onto2, Reasoner hermit2, QueryRoller roller2) { + record = record2; onto = onto2; hermit = hermit2; roller = roller2; + } + + @Override + public Boolean call() throws Exception { + Set answers = new HashSet(); + Timer t = new Timer(); + if (record.getDistinguishedVariables().length > 1) { + if (record.getDistinguishedVariables().length == 2 && record.getClause().getBodyLength() == 1) { + dealWithAtomicBinaryQuery(record.getClause().getBodyAtom(0), answers); + System.out.println("Query " + record.getQueryID() + " The number of answers: " + answers.size()); + System.out.println("Query " + record.getQueryID() + " Total time: " + t.duration()); + } + else { + System.out.println("Query " + record.getQueryID() + " The number of answers: Query cannot be processsed."); + System.out.println("Query " + record.getQueryID() + " Total time: Query cannot be processsed."); + } + return false; + } + + OWLClassExpression exp = null; + + try { + exp = roller.rollUp(record.getClause(), record.getAnswerVariables()[0]); + } catch (Exception e) { + System.out.println("Query " + record.getQueryID() + " The number of answers: Query cannot be processsed."); + System.out.println("Query " + record.getQueryID() + " Total time: Query cannot be processsed."); + return false; + } + System.out.println(exp); + for (Node node: hermit.getInstances(exp, false)) { + for (OWLIndividual ind: node.getEntities()) { + answers.add(ind.toStringID()); + } + } + System.out.println("Query " + record.getQueryID() + " The number of answers: " + answers.size()); + System.out.println("Query " + record.getQueryID() + " Total time: " + 
t.duration()); + return true; + } + + private void dealWithAtomicBinaryQuery(Atom bodyAtom, Set answers) { + StringBuilder sb = new StringBuilder(); + OWLDataFactory f = onto.getOWLOntologyManager().getOWLDataFactory(); + OWLObjectProperty p = f.getOWLObjectProperty(IRI.create(((AtomicRole) bodyAtom.getDLPredicate()).getIRI())); + for (Node sub: hermit.getInstances(f.getOWLObjectMinCardinality(1, p), false)) { + for (Node obj: hermit.getObjectPropertyValues(sub.getRepresentativeElement(), p)) { + for (OWLNamedIndividual subInd: sub.getEntities()) { + sb.setLength(0); + sb.append(subInd.toString()).append(" "); + int len = sb.length(); + for (OWLNamedIndividual objInd: obj.getEntities()) { + sb.setLength(len); + sb.append(objInd.toString()); + answers.add(sb.toString()); + } + } + } + } + } + +} + diff --git a/src/test/java/uk/ac/ox/cs/hermit/HermitTester.java b/src/test/java/uk/ac/ox/cs/hermit/HermitTester.java new file mode 100644 index 0000000..dc70284 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/hermit/HermitTester.java @@ -0,0 +1,55 @@ +package uk.ac.ox.cs.hermit; + +import org.semanticweb.HermiT.Configuration; +import org.semanticweb.HermiT.Reasoner; +import org.semanticweb.HermiT.model.Atom; +import org.semanticweb.HermiT.model.DLClause; +import org.semanticweb.HermiT.model.DLOntology; +import org.semanticweb.HermiT.structural.OWLClausification; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLNamedIndividual; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.reasoner.Node; + +import uk.ac.ox.cs.pagoda.owl.OWLHelper; + +public class HermitTester { + + public static void main(String[] args) { + OWLOntology onto = OWLHelper.loadOntology("imported.owl"); + Reasoner hermit = new Reasoner(onto); + OWLDataFactory f = 
onto.getOWLOntologyManager().getOWLDataFactory(); + OWLClass concept = f.getOWLClass(IRI.create("http://semantics.crl.ibm.com/univ-bench-dl.owl#Query12")); + + for (OWLOntology o: onto.getImportsClosure()) { + System.out.println(o.containsEntityInSignature(concept)); + for (OWLAxiom axiom: o.getAxioms()) + if (axiom.getClassesInSignature().contains(concept)) + System.out.println(axiom); + } + + for (Node node : hermit.getInstances(concept, false)) + for (OWLNamedIndividual i: node.getEntities()) { + System.out.println(i.toStringID()); + } + +// clausifierTest(); + } + + @SuppressWarnings("unused") + private static void clausifierTest() { + OWLOntology onto = OWLHelper.loadOntology("/users/yzhou/ontologies/travel.owl"); + OWLClausification clausifier = new OWLClausification(new Configuration()); + DLOntology dlOntology = (DLOntology)clausifier.preprocessAndClausify(onto, null)[1]; + + for (DLClause clause: dlOntology.getDLClauses()) + System.out.println(clause); + for (Atom atom : dlOntology.getPositiveFacts()) + System.out.println(atom); + + } + +} diff --git a/src/test/java/uk/ac/ox/cs/hermit/JAIR_HermiT.java b/src/test/java/uk/ac/ox/cs/hermit/JAIR_HermiT.java new file mode 100644 index 0000000..72e7af8 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/hermit/JAIR_HermiT.java @@ -0,0 +1,129 @@ +package uk.ac.ox.cs.hermit; + +import org.junit.Test; +import uk.ac.ox.cs.pagoda.util.TestUtil; + +public class JAIR_HermiT { + + @Test + public void lubm1() throws Exception { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "lubm/univ-bench.owl"), + TestUtil.combinePaths(ontoDir, "lubm/data/lubm1_owl"), + TestUtil.combinePaths(ontoDir, "lubm/queries/answersCorrectness.sparql") +// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/lubm1/hermit" + }; + HermitQueryReasoner.main(args); + } + + @Test + public void lubm1_rolledUp() throws Exception { + String ontoDir = 
TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + "/home/yzhou/backup/20141212/univ-bench-queries.owl", + TestUtil.combinePaths(ontoDir, "lubm/data/lubm1_owl"), + TestUtil.combinePaths(ontoDir, "lubm/queries/atomic_lubm.sparql") +// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/lubm1/hermit_rolledUp" + }; + HermitQueryReasoner.main(args); + } + + @Test + public void uobm1() throws Exception { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "uobm/univ-bench-dl.owl"), + TestUtil.combinePaths(ontoDir, "uobm/data/uobm1_owl_withDeclaration"), + TestUtil.combinePaths(ontoDir, "uobm/queries/standard.sparql") +// , "hermit_uobm1.out" +// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uobm1/hermit" + }; + HermitQueryReasoner.main(args); + } + + @Test + public void uobm1_rolledUp() throws Exception { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + "/home/yzhou/backup/20141212/univ-bench-dl-queries.owl", + TestUtil.combinePaths(ontoDir, "uobm/data/uobm1_owl_withDeclaration"), + TestUtil.combinePaths(ontoDir, "uobm/queries/atomic_uobm.sparql") + , "hermit_uobm1_rolledUp.out" +// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uobm1/hermit_rolledUp" + }; + HermitQueryReasoner.main(args); + } + + @Test + public void fly_rolledUp() throws Exception { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + HermitQueryReasoner.main( + TestUtil.combinePaths(ontoDir, "fly/fly-all-in-one_rolledUp.owl"), +// TestUtil.combinePaths(ontoDir, "fly/fly_anatomy_XP_with_GJ_FC_individuals.owl", + null, + TestUtil.combinePaths(ontoDir, "fly/queries/fly_atomic.sparql") + , "hermit_fly.out" + ); + } + + @Test + public void npd() throws Exception { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + HermitQueryReasoner.main( + 
TestUtil.combinePaths(ontoDir, "npd/npd-all-minus-datatype.owl"), + TestUtil.combinePaths(ontoDir, "npd/data/npd-data-dump-minus-datatype-new.ttl"), + TestUtil.combinePaths(ontoDir, "npd/queries/atomic_ground.sparql") + , "hermit_npd.out" +// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/npd/hermit" + ); + } + + @Test + public void dbpedia() throws Exception { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + HermitQueryReasoner.main( + TestUtil.combinePaths(ontoDir, "dbpedia/integratedOntology-all-in-one-minus-datatype.owl"), + TestUtil.combinePaths(ontoDir, "dbpedia/data/dbpedia-minus-datatype-new.ttl"), + TestUtil.combinePaths(ontoDir, "dbpedia/queries/atomic_ground.sparql") + , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/dbpedia/hermit" + ); + } + + @Test + public void reactome() throws Exception { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + HermitQueryReasoner.main( + TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/biopax-level3-processed.owl"), + TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/graph sampling/reactome_sample_10.ttl"), + TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/queries/atomic_ground.sparql") + , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/reactome/hermit_10p" + ); + } + + @Test + public void chembl() throws Exception { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + HermitQueryReasoner.main( + TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/cco-noDPR.ttl"), +// null, + TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/graph sampling/sample_1.nt"), + TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/queries/atomic_ground.sparql") + , "hermit_chembl.out" +// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/chembl/hermit_1p" + ); + } + + @Test + public void uniprot() throws Exception { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + HermitQueryReasoner.main( + TestUtil.combinePaths(ontoDir, 
"bio2rdf/uniprot/core-sat.owl"), + TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/graph sampling/sample_1.nt"), + TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/queries/atomic_ground.sparql") + , "hermit_uniprot.out" +// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uniprot/hermit_1p" + ); + } + +} diff --git a/src/test/java/uk/ac/ox/cs/jrdfox/Tester.java b/src/test/java/uk/ac/ox/cs/jrdfox/Tester.java new file mode 100644 index 0000000..94f5401 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/jrdfox/Tester.java @@ -0,0 +1,195 @@ +package uk.ac.ox.cs.jrdfox; + +import java.io.File; + +import org.semanticweb.owlapi.model.OWLOntology; + +import uk.ac.ox.cs.JRDFox.JRDFStoreException; +import uk.ac.ox.cs.JRDFox.Prefixes; +import uk.ac.ox.cs.JRDFox.store.DataStore; +import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType; +import uk.ac.ox.cs.JRDFox.store.Parameters; +import uk.ac.ox.cs.JRDFox.store.TripleStatus; +import uk.ac.ox.cs.JRDFox.store.TupleIterator; +import uk.ac.ox.cs.JRDFox.store.DataStore.StoreType; +import uk.ac.ox.cs.pagoda.owl.OWLHelper; +import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine; +import uk.ac.ox.cs.pagoda.util.Namespace; +import uk.ac.ox.cs.pagoda.util.Timer; + +public class Tester { + + public static void main(String[] args) throws JRDFStoreException { + Tester tester = new Tester(); + tester.testCrash(); + } + + private void evaluate_againstIDs(String queryText) throws JRDFStoreException { + int number = 0; + Timer t = new Timer(); + TupleIterator iter = null; + try { + iter = store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB); + for (long multi = iter.open(); multi != 0; multi = iter.getNext()) + ++number; + } finally { + if (iter != null) iter.dispose(); + } + System.out.println(number); + System.out.println(t.duration()); + + } + + DataStore store; + Prefixes prefixes = new Prefixes(); + Parameters parameters; + + 
public Tester() { + try { + store = new DataStore(StoreType.NarrowParallelHead); + store.setNumberOfThreads(RDFoxQueryEngine.matNoOfThreads); + store.initialize(); + System.out.println("data store created."); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + parameters = new Parameters(); + parameters.m_allAnswersInRoot = true; + parameters.m_useBushy = true; + } + + public Tester(String path) { + try { + store = new DataStore(new File(path)); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + parameters = new Parameters(); +// parameters.m_allAnswersInRoot = true; +// parameters.m_useBushy = true; + } + + public void applyReasoning(boolean incremental) { + Timer t = new Timer(); + try { + store.applyReasoning(incremental); + } catch (JRDFStoreException e) { + e.printStackTrace(); + } + System.out.println("reasoning done: " + t.duration()); + } + + public void dispose() { + store.dispose(); + } + + public void testCrash() throws JRDFStoreException { +// DataStore lowerStore = new DataStore(StoreType.NarrowParallelHead); +// lowerStore.setNumberOfThreads(RDFoxQueryEngine.matNoOfThreads); +// lowerStore.initialize(); +// System.out.println("lower data store created."); + OWLOntology ontology = OWLHelper.loadOntology("data/fly/fly_anatomy_XP_with_GJ_FC_individuals.owl"); + System.out.println("ontology loaded ... " + ontology.getAxiomCount()); + + store.importTurtleFile(new File("testcase/fly.ttl")); + System.out.println("data loaded. " + store.getTriplesCount()); + + store.importRules(new File[] {new File("testcase/lower.dlog")}); + System.out.println("rules loaded. " + store.getTriplesCount()); + + store.applyReasoning(); + System.out.println("materialised. " + store.getTriplesCount()); + + store.clearRulesAndMakeFactsExplicit(); + + store.importRules(new File[] {new File("testcase/multi.dlog")}); + System.out.println("rules loaded. " + store.getTriplesCount()); + + store.applyReasoning(); + System.out.println("materialised. 
" + store.getTriplesCount()); + + store.makeFactsExplicit(); + + store.importTurtleFiles(new File[] {new File("testcase/first.ttl")}, UpdateType.ScheduleForAddition); + System.out.println("first data loaded. " + store.getTriplesCount()); + + store.applyReasoning(true); + System.out.println("incremental reasoning done. " + store.getTriplesCount()); + + store.clearRulesAndMakeFactsExplicit(); + + store.importTurtleFiles(new File[] {new File("testcase/second.ttl")}, UpdateType.ScheduleForAddition); + store.importRules(new File[] {new File("testcase/tracking.dlog")}, UpdateType.ScheduleForAddition); + store.applyReasoning(true); + System.out.println("incremental reasoning done. " + store.getTriplesCount()); + + evaluate_againstIDs("select distinct ?z where { ?x <" + Namespace.RDF_TYPE + "> ?z . }"); + System.out.println("done."); +// tester.applyReasoning(true); +// tester.evaluate_againstIDs("select distinct ?z where { ?x <" + Namespace.RDF_TYPE + "> ?z . }"); +// System.out.println("done."); + + store.dispose(); +// lowerStore.dispose(); + } + + public void test() throws JRDFStoreException { + evaluate("PREFIX benchmark: " + + "SELECT distinct ?x WHERE { " + + "?x a benchmark:Person . " + + "?x benchmark:like ?y . " + + "?z a benchmark:Chair . " + + "?z benchmark:isHeadOf . " + + "?z benchmark:like ?y . " + + "?x a . " + + "?z a . " + + "?y a }"); + + evaluate("PREFIX benchmark: " + + "SELECT distinct ?x WHERE { " + + "?x a benchmark:Person . " + + "?x benchmark:like ?y . " + + "?z a benchmark:Chair . " + + "?z benchmark:isHeadOf . " + + "?z benchmark:like ?y . " + + "?z a . " + + "?y a ." + + "?x a }"); + + evaluate("PREFIX benchmark: " + + "SELECT distinct ?x WHERE { " + + "?x a benchmark:Person . " + + "?x benchmark:like ?y . " + + "?z a benchmark:Chair . " + + "?z benchmark:isHeadOf . " + + "?z benchmark:like ?y . " + + "?y a . " + + "?x a . " + + "?z a }"); + + evaluate("PREFIX benchmark: " + + "SELECT distinct ?x WHERE { " + + "?x a benchmark:Person . 
" + + "?x benchmark:like ?y . " + + "?z a benchmark:Chair . " + + "?z benchmark:isHeadOf . " + + "?z benchmark:like ?y . " + + "?y a }"); + } + + public void evaluate(String query) throws JRDFStoreException { + int number = 0; + Timer t = new Timer(); + TupleIterator iter = null; + try { + iter = store.compileQuery(query, prefixes, parameters); + for (long multi = iter.open(); multi != 0; multi = iter.getNext()) + ++number; + } finally { + if (iter != null) iter.dispose(); + } + System.out.println(number); + System.out.println(t.duration()); + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/approx/ApproxTester.java b/src/test/java/uk/ac/ox/cs/pagoda/approx/ApproxTester.java new file mode 100644 index 0000000..63fe7b7 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/approx/ApproxTester.java @@ -0,0 +1,158 @@ +package uk.ac.ox.cs.pagoda.approx; + +import java.io.IOException; + +import org.semanticweb.owlapi.model.OWLOntology; + +import uk.ac.ox.cs.pagoda.approx.KnowledgeBase; +import uk.ac.ox.cs.pagoda.approx.RLOntology; +import uk.ac.ox.cs.pagoda.approx.RLPlusOntology; +import uk.ac.ox.cs.pagoda.constraints.NullaryBottom; +import uk.ac.ox.cs.pagoda.owl.OWLHelper; +import uk.ac.ox.cs.pagoda.rules.DisjunctiveProgram; +import uk.ac.ox.cs.pagoda.rules.ExistentialProgram; +import uk.ac.ox.cs.pagoda.rules.ExistentialToDisjunctive; +import uk.ac.ox.cs.pagoda.rules.GeneralProgram; +import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; +import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; +import uk.ac.ox.cs.pagoda.util.Utility; + +public class ApproxTester { + + private static ApproxType description = ApproxType.DATALOGPMOR; + + private static String ontoFile = null; + + public static void main(String[] args) throws IOException + { + args = new String[] { + "-tbox", +// "/home/yzhou/krr-nas-share/Yujiao/ontologies/bio2rdf/chembl/cco-noDPR.ttl", +// "/home/yzhou/krr-nas-share/Yujiao/ontologies/bio2rdf/reactome/biopax-level3-processed.owl", +// 
"/media/krr-nas-share/Yujiao/ontologies/bio2rdf/atlas/gxaterms.owl", +// "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/uniprot/core-sat-processed.owl", +// PagodaTester.npd_tbox, +// "/users/yzhou/temp/ontologies/core.RLor.rdf", + "datatype.owl", + "-dest", ApproxType.DATALOGPMOR.toString() + }; + + long startTime = System.currentTimeMillis(); + + if (args.length > 0) { + if (args.length % 2 != 0) { + System.out.println("arguments error..."); + return ; + } + for (int i = 0; i < args.length ; i = i + 2) + if (!setArgument(args[i], args[i + 1])) { + System.out.println("arguments error..."); + return ; + } + } + +// Utility.redirectSystemOut(); + + System.setProperty("entityExpansionLimit", String.valueOf(Integer.MAX_VALUE)); + + String directory = ontoFile.substring(0, ontoFile.lastIndexOf(Utility.FILE_SEPARATOR) + 1); + + KnowledgeBase program = null; + switch (description) { + case OWL2RLPLUS: program = new RLPlusOntology(); break; + case OWL2RL: program = new RLOntology(); break; + case DATALOG_UPPER: program = new UpperDatalogProgram(); break; + case DATALOG_LOWER: program = new LowerDatalogProgram(); break; + case EXISTENTIAL: program = new ExistentialProgram(); break; + case DISJUNCTIVE: program = new DisjunctiveProgram(); break; + case DATALOGPMOR: program = new GeneralProgram(); break; + case EXIST2DISJ: program = new ExistentialToDisjunctive(); break; + default: + System.exit(0); + } + + if (program instanceof RLPlusOntology) + ((RLPlusOntology) program).setCorrespondenceFileLoc(directory + "correspondence"); + OWLOntology ontology = OWLHelper.loadOntology(ontoFile); + program.load(ontology, new NullaryBottom()); + + program.transform(); + + program.save(); + + System.out.println("Time to transform the rules: " + (System.currentTimeMillis() - startTime) / 1000.); + + Utility.closeCurrentOut(); + } + + private static boolean setArgument(String key, String value) { + if (key.equalsIgnoreCase("-dest")) + if (value.equalsIgnoreCase("OWL2RL+")) description = 
ApproxType.OWL2RLPLUS; + else if (value.equalsIgnoreCase("OWL2RL")) description = ApproxType.OWL2RL; + else if (value.equalsIgnoreCase("UPPERDATALOG")) description = ApproxType.DATALOG_UPPER; + else if (value.equalsIgnoreCase("LOWERDATALOG")) description = ApproxType.DATALOG_LOWER; + else if (value.equalsIgnoreCase("DATALOGPMOR")) description = ApproxType.DATALOGPMOR; + else if (value.equalsIgnoreCase("EXISTENTIALRULES")) description = ApproxType.EXISTENTIAL; + else if (value.equalsIgnoreCase("DISJUNCTIVE")) description = ApproxType.DISJUNCTIVE; + else if (value.equalsIgnoreCase("EXIST2DISJ")) description = ApproxType.EXIST2DISJ; + else { + System.out.println("illegal destination argument..."); + return false; + } + else if (key.equalsIgnoreCase("-tbox")) + ontoFile = value; + else { + System.out.println("unrecognisable type of argument..."); + return false; + } + + return true; + } + + public enum ApproxType { + /** + * approx to (RL + self + top being the subClassExp) + */ + OWL2RLPLUS, + + /** + * approx to RL + */ + OWL2RL, + + /** + * approx to datalog by replacing existential quantified variables + * by fresh constants and replacing disjunctions by conjunctions + */ + DATALOG_UPPER, + + /** + * approx to datalog by ignoring existential and disjunctive axiom + */ + DATALOG_LOWER, + + /** + * approx to existential rules by replacing disjunctions by + * conjunctions + */ + EXISTENTIAL, + + /** + * approx to disjunctive datalog program by replacing existential + * quantified variables by fresh constants (DNF) + */ + DISJUNCTIVE, + + /** + * transform into rules, no approximation at all + */ + DATALOGPMOR, + + /** + * approx existential quantifiers by disjunctions + */ + EXIST2DISJ + + }; + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/approx/ClauseTester.java b/src/test/java/uk/ac/ox/cs/pagoda/approx/ClauseTester.java new file mode 100644 index 0000000..cff1d1c --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/approx/ClauseTester.java @@ -0,0 +1,38 @@ 
+package uk.ac.ox.cs.pagoda.approx; + +import org.semanticweb.HermiT.model.DLClause; +import org.semanticweb.owlapi.model.OWLOntology; + +import uk.ac.ox.cs.pagoda.constraints.NullaryBottom; +import uk.ac.ox.cs.pagoda.owl.OWLHelper; +import uk.ac.ox.cs.pagoda.rules.GeneralProgram; + +public class ClauseTester { + + public static void main(String... args) { + args = new String[] { +// "/home/yzhou/krr-nas-share/Yujiao/ontologies/bio2rdf/chembl/cco-noDPR.ttl", + "/home/yzhou/krr-nas-share/Yujiao/ontologies/bio2rdf/reactome/biopax-level3-processed.owl", +// "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/atlas/gxaterms.owl", +// "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/uniprot/core-sat-processed.owl", +// PagodaTester.npd_tbox, +// "/users/yzhou/temp/ontologies/core.RLor.rdf", +// "datatype.owl" + }; + + String ontoFile = args[0]; + OWLOntology ontology = OWLHelper.loadOntology(ontoFile); + GeneralProgram program = new GeneralProgram();; + program.load(ontology, new NullaryBottom()); + program.transform(); + program.save(); + if (program instanceof GeneralProgram) { + GeneralProgram gp = ((GeneralProgram) program); + for (DLClause clause: gp.getClauses()) { + System.out.println(clause); + System.out.println(OWLHelper.getOWLAxiom(ontology, clause)); + } + } + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/endomorph/DependencyGraphTest.java b/src/test/java/uk/ac/ox/cs/pagoda/endomorph/DependencyGraphTest.java new file mode 100644 index 0000000..a4579a3 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/endomorph/DependencyGraphTest.java @@ -0,0 +1,60 @@ +package uk.ac.ox.cs.pagoda.endomorph; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.*; +import org.testng.Assert; +import org.testng.annotations.Test; +import uk.ac.ox.cs.JRDFox.model.GroundTerm; +import uk.ac.ox.cs.JRDFox.model.Individual; +import uk.ac.ox.cs.pagoda.query.AnswerTuple; +import uk.ac.ox.cs.pagoda.summary.Graph; +import 
uk.ac.ox.cs.pagoda.summary.NodeTuple; +import uk.ac.ox.cs.pagoda.util.TestUtil; + +import java.util.HashSet; + +import static uk.ac.ox.cs.pagoda.util.TestUtil.getEntityIRI; + +public class DependencyGraphTest { + + private OWLOntology getOntology() throws OWLOntologyCreationException { + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = manager.getOWLDataFactory(); + OWLOntology ontology = manager.createOntology(); + + OWLClass hardWorkingStudent = factory.getOWLClass(getEntityIRI("HardWorkingStudent")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(hardWorkingStudent)); + OWLNamedIndividual a = factory.getOWLNamedIndividual(getEntityIRI("a")); + OWLNamedIndividual b = factory.getOWLNamedIndividual(getEntityIRI("b")); + OWLObjectProperty takesCourse = factory.getOWLObjectProperty(IRI.create(String.format(TestUtil.NS, "takesCourse"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(takesCourse)); + + // Class assertions + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(hardWorkingStudent, a)); // HardWorkingStudent(a) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(hardWorkingStudent, b)); // HardWorkingStudent(b) + + // Minimum cardinality axiom + manager.addAxiom(ontology, + factory.getOWLEquivalentClassesAxiom(hardWorkingStudent, + factory.getOWLObjectMinCardinality(3, + takesCourse))); + return ontology; + } + + @Test + public void test() throws OWLOntologyCreationException { + OWLOntology ontology = getOntology(); + Graph graph = new Graph(ontology); + DependencyGraph dependencyGraph = new DependencyGraph(graph); + + HashSet tuples = new HashSet<>(); + tuples.add(graph.getNodeTuple(new AnswerTuple(new GroundTerm[]{Individual.create(String.format(TestUtil.NS, "a")), Individual.create(String.format(TestUtil.NS, "a"))}))); + tuples.add(graph.getNodeTuple(new AnswerTuple(new GroundTerm[]{Individual.create(String.format(TestUtil.NS, "a")), 
Individual.create(String.format(TestUtil.NS, "b"))}))); + tuples.add(graph.getNodeTuple(new AnswerTuple(new GroundTerm[]{Individual.create(String.format(TestUtil.NS, "b")), Individual.create(String.format(TestUtil.NS, "a"))}))); + tuples.add(graph.getNodeTuple(new AnswerTuple(new GroundTerm[]{Individual.create(String.format(TestUtil.NS, "b")), Individual.create(String.format(TestUtil.NS, "b"))}))); + + dependencyGraph.build(tuples); + + Assert.assertTrue(dependencyGraph.getTopologicalOrder().size() >= 2); + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/BugTests.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/BugTests.java new file mode 100644 index 0000000..3f14ec7 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/BugTests.java @@ -0,0 +1,240 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.*; +import org.testng.Assert; +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.query.AnswerTuple; +import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.query.QueryRecord; +import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; +import uk.ac.ox.cs.pagoda.util.TestUtil; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; + +import static uk.ac.ox.cs.pagoda.util.TestUtil.getEntityIRI; + +public class BugTests { + + + + @Test + public void minimumCardinalityAxiom2() throws OWLOntologyCreationException, IOException, OWLOntologyStorageException { + + /* + * Build test ontology + * */ + + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = manager.getOWLDataFactory(); + OWLOntology ontology = manager.createOntology(); + +// OWLClass student = factory.getOWLClass(getEntityIRI("Student")); +// manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(student)); +// OWLClass course = factory.getOWLClass(getEntityIRI("Course")); +// 
manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(course)); + OWLClass hardWorkingStudent = factory.getOWLClass(getEntityIRI("HardWorkingStudent")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(hardWorkingStudent)); + OWLNamedIndividual a = factory.getOWLNamedIndividual(getEntityIRI("a")); + OWLNamedIndividual b = factory.getOWLNamedIndividual(getEntityIRI("b")); + OWLObjectProperty takesCourse = factory.getOWLObjectProperty(IRI.create(String.format(TestUtil.NS, "takesCourse"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(takesCourse)); + + // Class assertions + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(hardWorkingStudent, a)); // HardWorkingStudent(a) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(hardWorkingStudent, b)); // HardWorkingStudent(b) + + // Minimum cardinality axiom + manager.addAxiom(ontology, + factory.getOWLEquivalentClassesAxiom(hardWorkingStudent, + factory.getOWLObjectMinCardinality(3, + takesCourse))); + +// manager.saveOntology(ontology, Files.newOutputStream(Paths.get("/home/alessandro/Desktop/test-ontology.owl"))); + + /* + * Test one query + * */ + + QueryReasoner pagoda = QueryReasoner.getInstance(ontology); + pagoda.loadOntology(ontology); + if (pagoda.preprocess()) { + String query = "select distinct ?x ?y " + + " where { " + + " ?x <" + takesCourse.toStringID() + "> _:z . 
" + + " ?y <" + takesCourse.toStringID() + "> _:z " + + " }"; + AnswerTuples answers = pagoda.evaluate(query); + int count = 0; + for (AnswerTuple ans; answers.isValid(); answers.moveNext()) { + ans = answers.getTuple(); + TestUtil.logInfo(ans); + count++; + } + Assert.assertEquals(count, 2); + } + pagoda.dispose(); + } + +// @Test + public void minimumCardinalityAxiom() throws OWLOntologyCreationException, IOException, OWLOntologyStorageException { + + /* + * Build test ontology + * */ + + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = manager.getOWLDataFactory(); + OWLOntology ontology = manager.createOntology(); + + OWLClass student = factory.getOWLClass(getEntityIRI("Student")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(student)); + OWLClass course = factory.getOWLClass(getEntityIRI("Course")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(course)); + OWLClass hardWorkingStudent = factory.getOWLClass(getEntityIRI("HardWorkingStudent")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(hardWorkingStudent)); + OWLNamedIndividual a = factory.getOWLNamedIndividual(getEntityIRI("a")); + OWLNamedIndividual b = factory.getOWLNamedIndividual(getEntityIRI("b")); + OWLNamedIndividual c1 = factory.getOWLNamedIndividual(getEntityIRI("c1")); + OWLNamedIndividual c2 = factory.getOWLNamedIndividual(getEntityIRI("c2")); + OWLNamedIndividual c3 = factory.getOWLNamedIndividual(getEntityIRI("c3")); + OWLNamedIndividual d1 = factory.getOWLNamedIndividual(getEntityIRI("d1")); + OWLNamedIndividual d2 = factory.getOWLNamedIndividual(getEntityIRI("d2")); + OWLNamedIndividual d3 = factory.getOWLNamedIndividual(getEntityIRI("d3")); + OWLObjectProperty takesCourse = factory.getOWLObjectProperty(IRI.create(String.format(TestUtil.NS, "takesCourse"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(takesCourse)); + + // Class assertions + manager.addAxiom(ontology, 
factory.getOWLClassAssertionAxiom(student, a)); // Student(a) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(student, b)); // Student(b) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(course, c1)); // Course(c1) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(course, c2)); // Course(c2) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(course, c3)); // Course(c3) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(course, d1)); // Course(d1) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(course, d2)); // Course(d2) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(course, d3)); // Course(d3) + + // Role assertions + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(takesCourse, a, c1)); // takesCourse(a,c1) + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(takesCourse, a, c2)); // takesCourse(a,c2) + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(takesCourse, a, c3)); // takesCourse(a,c3) + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(takesCourse, b, d1)); // takesCourse(b,d1) + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(takesCourse, b, d2)); // takesCourse(b,d2) + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(takesCourse, b, d3)); // takesCourse(b,d3) + + // Minimum cardinality axiom + manager.addAxiom(ontology, + factory.getOWLEquivalentClassesAxiom(hardWorkingStudent, + factory.getOWLObjectMinCardinality(3, + takesCourse))); + + manager.saveOntology(ontology, Files.newOutputStream(Paths.get("/home/alessandro/Desktop/test-ontology.owl"))); + + /* + * Test one query + * */ + + QueryReasoner pagoda = QueryReasoner.getInstance(ontology); + pagoda.loadOntology(ontology); + if (pagoda.preprocess()) { + String query = "select distinct ?x ?y " + + " where { " + + " ?x <" + takesCourse.toStringID() + "> _:z . 
" + + " ?y <" + takesCourse.toStringID() + "> _:z " + + " }"; + AnswerTuples answers = pagoda.evaluate(query); + int count = 0; + for (AnswerTuple ans; answers.isValid(); answers.moveNext()) { + ans = answers.getTuple(); + TestUtil.logInfo(ans); + count++; + } + Assert.assertEquals(count, 2); + } + pagoda.dispose(); + } + + /** + * Bug: the relevant ontology is not a subset of the original one. + * + * @throws OWLOntologyCreationException + * @throws IOException + * @throws OWLOntologyStorageException + */ +// @Test + public void rTest() throws OWLOntologyCreationException, IOException, OWLOntologyStorageException { + + /* + * Build test ontology + * */ + + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = manager.getOWLDataFactory(); + OWLOntology ontology = manager.createOntology(); + + OWLClass classA = factory.getOWLClass(getEntityIRI("A")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classA)); + OWLClass classB = factory.getOWLClass(getEntityIRI("B")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classB)); + OWLNamedIndividual a = factory.getOWLNamedIndividual(getEntityIRI("a")); + OWLNamedIndividual b = factory.getOWLNamedIndividual(getEntityIRI("b")); + OWLNamedIndividual c = factory.getOWLNamedIndividual(getEntityIRI("c")); + OWLObjectProperty roleR = factory.getOWLObjectProperty(IRI.create(String.format(TestUtil.NS, "R"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(roleR)); + OWLObjectProperty roleP = factory.getOWLObjectProperty(IRI.create(String.format(TestUtil.NS, "P"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(roleP)); + + // Class assertions + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(classA, a)); // A(a) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(classA, b)); // A(b) + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(roleP, c, a)); // P(c,a) + + // Axioms + // subsetOf(A 
someValuesFrom(R owl:Thing)) + manager.addAxiom(ontology, + factory.getOWLSubClassOfAxiom(classA, + factory.getOWLObjectSomeValuesFrom(roleR, + factory.getOWLThing()))); + + // inverseFunctional(R) + manager.addAxiom(ontology, + factory.getOWLInverseFunctionalObjectPropertyAxiom(roleR)); + + // subsetOf(someValuesFrom(inverseOf(P) owl:thing) B) + manager.addAxiom(ontology, + factory.getOWLSubClassOfAxiom(factory.getOWLObjectSomeValuesFrom(roleP.getInverseProperty(), + factory.getOWLThing()), + classB)); + /* + * Save the ontology + * */ + +// manager.saveOntology(ontology, Files.newOutputStream(Paths.get("/home/alessandro/Desktop/test-ontology.owl"))); + + /* + * Test one query + * */ + + QueryReasoner pagoda = QueryReasoner.getInstance(ontology); + pagoda.loadOntology(ontology); + if(pagoda.preprocess()) { + String queryStr = "PREFIX rdf: " + + "select distinct ?x" + + " where { " + + " ?x rdf:type " + classB + + " }"; + QueryRecord queryRecord = pagoda.getQueryManager().create(queryStr); + System.out.println(queryRecord); + pagoda.evaluate(queryRecord); + AnswerTuples answers = queryRecord.getAnswers(); + System.out.println("Difficulty: " + queryRecord.getDifficulty()); + for(AnswerTuple ans; answers.isValid(); answers.moveNext()) { + ans = answers.getTuple(); + TestUtil.logInfo(ans); + } + } + pagoda.dispose(); + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/CostEvaluation.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/CostEvaluation.java new file mode 100644 index 0000000..01e8203 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/CostEvaluation.java @@ -0,0 +1,115 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.semanticweb.owlapi.model.OWLOntology; +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.owl.OWLHelper; +import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; +import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner.Type; +import uk.ac.ox.cs.pagoda.tester.PagodaTester; +import 
uk.ac.ox.cs.pagoda.util.TestUtil; +import uk.ac.ox.cs.pagoda.util.Timer; +import uk.ac.ox.cs.pagoda.util.Utility; + +public class CostEvaluation { + + @Test + public void lubm100() { + int number = 1; + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + PagodaTester.main( + TestUtil.combinePaths(ontoDir, "lubm/univ-bench.owl"), + TestUtil.combinePaths(ontoDir, "lubm/data/lubm" + number + ".ttl"), + TestUtil.combinePaths(ontoDir, "lubm/queries/test_all_pagoda.sparql") + ); +// AllTests.copy("output/log4j.log", "results-backup/jair/lubm" + number + ".out"); + } + + public void lubm1000() { + int number = 1000; + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "lubm/univ-bench.owl"), + TestUtil.combinePaths(ontoDir, "lubm/data/lubm" + number + ".ttl"), + TestUtil.combinePaths(ontoDir, "lubm/queries/test_all_pagoda.sparql") + }; + OWLOntology ontology = OWLHelper.loadOntology(args[0]); + QueryReasoner reasoner = QueryReasoner.getInstance(Type.ELHOU, ontology, true, true); + Timer t = new Timer(); + reasoner.loadOntology(ontology); + reasoner.importData(args[1]); + if (!reasoner.preprocess()) + return ; + Utility.logInfo("Preprocessing Done in " + t.duration() + " seconds."); + + reasoner.evaluate(reasoner.getQueryManager().collectQueryRecords(args[2])); +// AllTests.copy("output/log4j.log", "results-backup/jair/lubm" + number + ".out"); + } + + @Test + public void uobm5() { + int number = 1; + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "uobm/univ-bench-dl.owl"), + TestUtil.combinePaths(ontoDir, "uobm/data/uobm" + number + ".ttl"), + TestUtil.combinePaths(ontoDir, "uobm/queries/standard_all_pagoda.sparql") + }; + PagodaTester.main(args); +// AllTests.copy("output/log4j.log", "results-backup/jair/uobm" + number + ".out"); + } + + public void uobm100() { + int number = 200; + String ontoDir = 
TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "uobm/univ-bench-dl.owl"), + TestUtil.combinePaths(ontoDir, "uobm/data/uobm" + number + ".ttl"), + TestUtil.combinePaths(ontoDir, "uobm/queries/standard_group3_all.sparql") + }; + PagodaTester.main(args); +// AllTests.copy("output/log4j.log", "results-backup/jair/uobm" + number + ".out"); + } + + public void uobm500() { + int number = 500; + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "uobm/univ-bench-dl.owl"), + TestUtil.combinePaths(ontoDir, "uobm/data/uobm" + number + ".ttl"), + TestUtil.combinePaths(ontoDir, "uobm/queries/standard_all_pagoda.sparql") + }; + + OWLOntology ontology = OWLHelper.loadOntology(args[0]); + QueryReasoner reasoner = QueryReasoner.getInstance(Type.ELHOU, ontology, true, true); + Timer t = new Timer(); + reasoner.loadOntology(ontology); + reasoner.importData(args[1]); + if (!reasoner.preprocess()) + return ; + Utility.logInfo("Preprocessing Done in " + t.duration() + " seconds."); + + reasoner.evaluate(reasoner.getQueryManager().collectQueryRecords(args[2])); +// AllTests.copy("output/log4j.log", "results-backup/jair/uobm" + number + ".out"); + } + + + public static void main(String... 
args) { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + args = new String[] { + TestUtil.combinePaths(ontoDir, "dbpedia/integratedOntology-all-in-one-minus-datatype.owl"), + TestUtil.combinePaths(ontoDir, "dbpedia/data/dbpedia-minus-datatype-new.ttl"), + TestUtil.combinePaths(ontoDir, "dbpedia/queries/atomic_ground.sparql") + }; + + OWLOntology ontology = OWLHelper.loadOntology(args[0]); + QueryReasoner reasoner = QueryReasoner.getInstance(Type.ELHOU, ontology, true, true); + Timer t = new Timer(); + reasoner.loadOntology(ontology); + reasoner.importData(args[1]); + if (!reasoner.preprocess()) + return ; + Utility.logInfo("Preprocessing Done in " + t.duration() + " seconds."); + + reasoner.evaluate(reasoner.getQueryManager().collectQueryRecords(args[2])); + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/JAIR_PAGOdA.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/JAIR_PAGOdA.java new file mode 100644 index 0000000..18f6cf9 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/JAIR_PAGOdA.java @@ -0,0 +1,191 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.tester.PagodaTester; +import uk.ac.ox.cs.pagoda.util.TestUtil; + +import java.io.IOException; + +public class JAIR_PAGOdA { + + public static void main(String... 
args) { + try { + new JAIR_PAGOdA().lubm1(); + } catch(IOException e) { + e.printStackTrace(); + } + } + + @Test + public void lubm1() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "lubm/univ-bench.owl"), + TestUtil.combinePaths(ontoDir, "lubm/data/lubm1.ttl"), + TestUtil.combinePaths(ontoDir, "lubm/queries/answersCorrectness.sparql") + }; + PagodaTester.main(args); + TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/lubm1/pagoda"); + } + + @Test + public void lubm1_conj() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "lubm/univ-bench.owl"), + TestUtil.combinePaths(ontoDir, "lubm/data/lubm1.ttl"), + TestUtil.combinePaths(ontoDir, "lubm/queries/test_pellet.sparql") + }; + PagodaTester.main(args); + TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/lubm1/pagoda_conj"); + } + + @Test + public void lubm1_rolledUp() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + PagodaTester.main( + "/home/yzhou/backup/20141212/univ-bench-queries.owl", + TestUtil.combinePaths(ontoDir, "lubm/data/lubm1.ttl"), + TestUtil.combinePaths(ontoDir, "lubm/queries/atomic_lubm.sparql") + ); + TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/lubm1/pagoda_rolledUp"); + } + + @Test + public void uobm1() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "uobm/univ-bench-dl.owl"), + TestUtil.combinePaths(ontoDir, "uobm/data/uobm1.ttl"), + TestUtil.combinePaths(ontoDir, "uobm/queries/standard.sparql") + }; + PagodaTester.main(args); + TestUtil.copyFile("output/log4j.log", 
"/home/yzhou/java-workspace/answersCorrectness-share/results_new/uobm1/pagoda"); + } + + @Test + public void uobm1_conj() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "uobm/univ-bench-dl.owl"), + TestUtil.combinePaths(ontoDir, "uobm/data/uobm1.ttl"), + TestUtil.combinePaths(ontoDir, "uobm/queries/standard_pellet.sparql") + }; + PagodaTester.main(args); + TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uobm1/pagoda_conj"); + } + + @Test + public void uobm1_rolledUp() { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + "/home/yzhou/backup/20141212/univ-bench-dl-queries.owl", + TestUtil.combinePaths(ontoDir, "uobm/data/uobm1.ttl"), + TestUtil.combinePaths(ontoDir, "uobm/queries/atomic_uobm.sparql") + }; + PagodaTester.main(args); +// TestUtil.copyFile(("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uobm1/pagoda_rolledUp"); + } + + @Test + public void fly() { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "fly/fly_anatomy_XP_with_GJ_FC_individuals.owl"), + null, + TestUtil.combinePaths(ontoDir, "fly/queries/fly_pellet.sparql") + }; + PagodaTester.main(args); +// TestUtil.copyFile(("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/fly/pagoda"); + } + + @Test + public void fly_conj() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "fly/fly_anatomy_XP_with_GJ_FC_individuals.owl"), + null, + TestUtil.combinePaths(ontoDir, "fly/queries/fly_pellet.sparql") + }; + PagodaTester.main(args); + TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/fly/pagoda_conj"); + } + 
+ public void fly_rolledUp() { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + PagodaTester.main( +// TestUtil.combinePaths(ontoDir, "fly/fly_anatomy_XP_with_GJ_FC_individuals.owl", + TestUtil.combinePaths(ontoDir, "fly/fly-all-in-one_rolledUp.owl"), + null, + TestUtil.combinePaths(ontoDir, "fly/queries/fly_atomic.sparql") + ); +// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/fly/pagoda_rolledUp"); + } + + public void dbpedia() { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + PagodaTester.main( + TestUtil.combinePaths(ontoDir, "dbpedia/integratedOntology-all-in-one-minus-datatype.owl"), + TestUtil.combinePaths(ontoDir, "dbpedia/data/dbpedia-minus-datatype-new.ttl"), + TestUtil.combinePaths(ontoDir, "dbpedia/queries/atomic_ground.sparql"), + "dbpedia.ans" + ); + +// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/dbpedia/pagoda"); + } + + public void npd() { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + PagodaTester.main( + TestUtil.combinePaths(ontoDir, "npd/npd-all-minus-datatype.owl"), + TestUtil.combinePaths(ontoDir, "npd/data/npd-data-dump-minus-datatype-new.ttl"), + TestUtil.combinePaths(ontoDir, "npd/queries/atomic_ground.sparql") + , "npd.ans" + ); + +// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/npd/pagoda"); + } + + public void reactome() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + PagodaTester.main( + TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/biopax-level3-processed.owl"), + TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/graph sampling/reactome_sample_10.ttl"), +// null, +// TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/queries/atomic_ground.sparql") + TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/queries/example.sparql") + , "pagoda_reactome.ans" + ); + 
TestUtil.copyFile("log4j.log", "output/jair/pagoda_reactome.example"); + +// TestUtil.copyFile(("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/reactome/pagoda_10p"); + } + + public void chembl() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + PagodaTester.main( + TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/cco-noDPR.ttl"), + TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/graph sampling/sample_1.nt"), +// TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/queries/atomic_ground.sparql") + TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/queries/example.sparql") + , "pagoda_chembl.ans" + ); + TestUtil.copyFile("log4j.log", "output/jair/pagoda_chembl.example"); +// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/chembl/pagoda_1p"); + } + + public void uniprot() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + PagodaTester.main( + TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/core-sat-processed.owl"), + TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/graph sampling/sample_1.nt"), +// null, +// TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/queries/atomic_ground.sparql") + TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/queries/example.sparql") + , "pagoda_uniprot.ans" + ); + TestUtil.copyFile("log4j.log", "output/jair/pagoda_uniprot.example"); +// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uniprot/pagoda_1p"); + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/JAIR_Scalability.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/JAIR_Scalability.java new file mode 100644 index 0000000..01f3568 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/JAIR_Scalability.java @@ -0,0 +1,91 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.tester.PagodaTester; 
+import uk.ac.ox.cs.pagoda.util.PagodaProperties; +import uk.ac.ox.cs.pagoda.util.TestUtil; + +import java.io.IOException; + +public class JAIR_Scalability { + + private static final String date = "_0123"; + + public static void main(String... args) throws IOException { + PagodaProperties.shellModeDefault = true; + new JAIR_Scalability().testUniProt(50, false); + } + + @Test + public void reactome() throws IOException { + testReactome(10, false); + } + + @Test + public void chembl() throws IOException { + testChEMBL(1, false); + } + + @Test + public void uniprot() throws IOException { + testUniProt(1, false); + } + + public void testReactome(int percentage, boolean save) throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/biopax-level3-processed.owl"), + TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/graph sampling/simplifed_sample_" + percentage + ".ttl"), + TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/queries/answersCorrectness.sparql") + , "reactome.ans" + }; + if (percentage == 10) + args[1] = args[1].replace("simplifed", "reactome"); + + PagodaTester.main(args); + if (save) + TestUtil.copyFile("log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/reactome/pagoda_" + percentage + "p" + date); + } + + public void testChEMBL(int percentage, boolean save) throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/cco-noDPR.ttl"), + TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/sample_" + percentage + ".nt"), +// TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/queries/atomic_ground.sparql") + TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/queries/answersCorrectness.sparql") + , "chembl.ans" + }; + if (percentage == 1 || percentage == 10 || percentage == 50) + args[1] = args[1].replace("chembl", "chembl/graph 
sampling"); + else + if (percentage == 100) + args[1] = "/home/yzhou/RDFData/ChEMBL/facts/ChEMBL.ttl"; + + PagodaTester.main(args); + if (save) + TestUtil.copyFile("log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/chembl/pagoda_" + percentage + "p" + date); + } + + public void testUniProt(int percentage, boolean save) throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + String[] args = new String[] { + TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/core-sat-processed.owl"), + TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/sample_" + percentage + ".nt"), +// TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/queries/atomic_ground.sparql") + TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/queries/answersCorrectness.sparql") + , "uniprot.ans" + }; + + if (percentage == 1 || percentage == 10 || percentage == 50) + args[1] = args[1].replace("uniprot", "uniprot/graph sampling"); + else + if (percentage == 100) + args[1] = "/home/yzhou/krr-nas-share/Yujiao/ontologies/bio2rdf/uniprot/data/uniprot_cleaned.nt"; + + PagodaTester.main(args); + if (save) + TestUtil.copyFile("log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uniprot/pagoda_" + percentage + "p" + date); + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/LightEvaluation.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/LightEvaluation.java new file mode 100644 index 0000000..3551b9b --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/LightEvaluation.java @@ -0,0 +1,18 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.testng.annotations.Test; + +import java.io.IOException; + +@Deprecated +public class LightEvaluation { + + @Test + public void evaluation() throws IOException { + new TestPagodaUOBM().answersCorrectness(1); + new TestPagodaLUBM().answersCorrectness(100); +// new TestPagodaFLY().answersCorrectness(); + new TestPagodaDBPedia().answersCorrectness(); + new 
TestPagodaNPD().testNPDwithoutDataType(); + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/MadeUpCases.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/MadeUpCases.java new file mode 100644 index 0000000..3d154cb --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/MadeUpCases.java @@ -0,0 +1,77 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.*; +import org.testng.Assert; +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.query.AnswerTuple; +import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; +import uk.ac.ox.cs.pagoda.util.TestUtil; + +import java.io.IOException; + +import static uk.ac.ox.cs.pagoda.util.TestUtil.getEntityIRI; + +public class MadeUpCases { + + @Test(groups = {"existential"}) + public void someTest() throws OWLOntologyCreationException, IOException, OWLOntologyStorageException { + + /* + * Build test ontology + * */ + + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = manager.getOWLDataFactory(); + OWLOntology ontology = manager.createOntology(); + + OWLClass A1 = factory.getOWLClass(getEntityIRI("A1")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(A1)); + OWLClass A2 = factory.getOWLClass(getEntityIRI("A2")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(A2)); + OWLClass A3 = factory.getOWLClass(getEntityIRI("A3")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(A3)); + OWLClass A4 = factory.getOWLClass(getEntityIRI("A4")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(A4)); + OWLNamedIndividual a = factory.getOWLNamedIndividual(getEntityIRI("a")); + OWLNamedIndividual b = factory.getOWLNamedIndividual(getEntityIRI("b")); + OWLObjectProperty R = factory.getOWLObjectProperty(IRI.create(String.format(TestUtil.NS, "R"))); + manager.addAxiom(ontology, 
factory.getOWLDeclarationAxiom(R)); + + // Class assertions + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(A1, a)); + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(A1, b)); + + // Minimum cardinality axiom + manager.addAxiom(ontology, factory.getOWLSubClassOfAxiom(A1, factory.getOWLObjectSomeValuesFrom(R, A2))); + manager.addAxiom(ontology, factory.getOWLSubClassOfAxiom(A2, factory.getOWLObjectSomeValuesFrom(R, A3))); + manager.addAxiom(ontology, factory.getOWLSubClassOfAxiom(A3, factory.getOWLObjectSomeValuesFrom(R, A4))); + manager.addAxiom(ontology, factory.getOWLTransitiveObjectPropertyAxiom(R)); + +// manager.saveOntology(ontology, Files.newOutputStream(Paths.get("/home/alessandro/Desktop/test-ontology.owl"))); + + /* + * Test one query + * */ + + QueryReasoner pagoda = QueryReasoner.getInstance(ontology); + pagoda.loadOntology(ontology); + if (pagoda.preprocess()) { + String query = "select distinct ?x ?y " + + " where { " + + " ?x <" + R.toStringID() + "> _:z . 
" + + " ?y <" + R.toStringID() + "> _:z " + + " }"; + AnswerTuples answers = pagoda.evaluate(query); + int count = 0; + for (AnswerTuple ans; answers.isValid(); answers.moveNext()) { + ans = answers.getTuple(); + TestUtil.logInfo(ans); + count++; + } + Assert.assertEquals(count, 2); + } + pagoda.dispose(); + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/SkolemisationTests.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/SkolemisationTests.java new file mode 100644 index 0000000..2fc682b --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/SkolemisationTests.java @@ -0,0 +1,269 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.*; +import org.testng.Assert; +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.query.AnswerTuple; +import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.query.QueryRecord; +import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; +import uk.ac.ox.cs.pagoda.util.TestUtil; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; + +public class SkolemisationTests { + + public static final String NS = "http://example.org/test#%s"; + + private IRI getEntityIRI(String name) { + return IRI.create(String.format(NS, name)); + } + +// @Test + public void commonSuccessorTest() throws OWLOntologyCreationException, IOException, OWLOntologyStorageException { + + /* + * Build test ontology + * */ + + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = manager.getOWLDataFactory(); + OWLOntology ontology = manager.createOntology(); + + OWLClass classA = factory.getOWLClass(getEntityIRI("A")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classA)); + OWLClass classB = factory.getOWLClass(getEntityIRI("B")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classB)); + OWLClass classC = 
factory.getOWLClass(getEntityIRI("C")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classC)); + OWLNamedIndividual c = factory.getOWLNamedIndividual(getEntityIRI("c")); + OWLNamedIndividual d = factory.getOWLNamedIndividual(getEntityIRI("d")); + OWLObjectProperty roleR = factory.getOWLObjectProperty(IRI.create(String.format(NS, "R"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(roleR)); + + // Class assertions + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(classA, c)); // A(c) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(classA, d)); // A(d) + + // Minimum cardinality axiom + manager.addAxiom(ontology, + factory.getOWLSubClassOfAxiom(classA, + factory.getOWLObjectUnionOf( + factory.getOWLObjectSomeValuesFrom(roleR, classB), + factory.getOWLObjectSomeValuesFrom(roleR, classC)))); + + /* + * Save the ontology + * */ + +// manager.saveOntology(ontology, Files.newOutputStream(Paths.get("/home/alessandro/Desktop/test-ontology.owl"))); + + /* + * Test one query + * */ + + QueryReasoner pagoda = QueryReasoner.getInstance(ontology); + pagoda.loadOntology(ontology); + if(pagoda.preprocess()) { + String queryStr = "select distinct ?x ?y " + + " where { " + + " ?x <" + roleR.toStringID() + "> _:z . 
" + + " ?y <" + roleR.toStringID() + "> _:z " + + " }"; + QueryRecord queryRecord = pagoda.getQueryManager().create(queryStr); + pagoda.evaluate(queryRecord); + AnswerTuples answers = queryRecord.getAnswers(); + System.out.println("Difficulty: " + queryRecord.getDifficulty()); + int count = 0; + for(AnswerTuple ans; answers.isValid(); answers.moveNext()) { + ans = answers.getTuple(); + TestUtil.logInfo(ans); + count++; + } + Assert.assertEquals(count, 2); + } + pagoda.dispose(); + } + +// @Test + public void yTest() throws OWLOntologyCreationException, IOException, OWLOntologyStorageException { + + /* + * Build test ontology + * */ + + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = manager.getOWLDataFactory(); + OWLOntology ontology = manager.createOntology(); + + OWLClass classA = factory.getOWLClass(getEntityIRI("A")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classA)); + OWLClass classB = factory.getOWLClass(getEntityIRI("B")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classB)); + OWLClass classC = factory.getOWLClass(getEntityIRI("C")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classC)); + OWLClass classD = factory.getOWLClass(getEntityIRI("D")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classD)); + OWLNamedIndividual a = factory.getOWLNamedIndividual(getEntityIRI("a")); + OWLNamedIndividual b = factory.getOWLNamedIndividual(getEntityIRI("b")); + OWLNamedIndividual c = factory.getOWLNamedIndividual(getEntityIRI("c")); + OWLNamedIndividual d = factory.getOWLNamedIndividual(getEntityIRI("d")); + OWLObjectProperty roleR = factory.getOWLObjectProperty(IRI.create(String.format(NS, "R"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(roleR)); + OWLObjectProperty roleS = factory.getOWLObjectProperty(IRI.create(String.format(NS, "S"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(roleS)); + OWLObjectProperty roleP = 
factory.getOWLObjectProperty(IRI.create(String.format(NS, "P"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(roleP)); + + // Class assertions + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(classD, a)); // D(a) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(classD, b)); // D(b) + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(roleS, c, a)); // S(c,a) + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(roleP, d, b)); // P(d,b) + + // Axioms + // subsetOf(D someValuesFrom(R owl:Thing)) + manager.addAxiom(ontology, + factory.getOWLSubClassOfAxiom(classD, + factory.getOWLObjectSomeValuesFrom(roleR, + factory.getOWLThing()))); + // subsetOf(someValuesFrom(inverseOf(S) owl:Thing) allValuesFrom(R A)) + manager.addAxiom(ontology, + factory.getOWLSubClassOfAxiom(factory.getOWLObjectSomeValuesFrom(roleS.getInverseProperty(), + factory.getOWLThing()), + factory.getOWLObjectAllValuesFrom(roleR, classA))); + // subsetOf(someValuesFrom(inverseOf(P) owl:Thing) B) + manager.addAxiom(ontology, + factory.getOWLSubClassOfAxiom(factory.getOWLObjectSomeValuesFrom(roleP.getInverseProperty(), + factory.getOWLThing()), + classB)); + // subsetOf(someValuesFrom(R A) C) + manager.addAxiom(ontology, + factory.getOWLSubClassOfAxiom(factory.getOWLObjectSomeValuesFrom(roleR, classA), classC)); + + /* + * Save the ontology + * */ + + manager.saveOntology(ontology, Files.newOutputStream(Paths.get("/home/alessandro/Desktop/test-ontology.owl"))); + + /* + * Test one query + * */ + + QueryReasoner pagoda = QueryReasoner.getInstance(ontology); + pagoda.loadOntology(ontology); + if(pagoda.preprocess()) { + String queryStr = "PREFIX rdf: " + + "select distinct ?x" + + " where { " +// + " ?x rdf:type " + classB + " ." +// + " ?x " + roleR + " "+ "_:y . 
" + + " ?x rdf:type " + classC + + " }"; + QueryRecord queryRecord = pagoda.getQueryManager().create(queryStr); + System.out.println(queryRecord); + pagoda.evaluate(queryRecord); + AnswerTuples answers = queryRecord.getAnswers(); + System.out.println("Difficulty: " + queryRecord.getDifficulty()); + int count = 0; + for(AnswerTuple ans; answers.isValid(); answers.moveNext()) { + ans = answers.getTuple(); + TestUtil.logInfo(ans); + count++; + } +// Assert.assertEquals(count, 1); + } + pagoda.dispose(); + } + + @Test + public void rTest() throws OWLOntologyCreationException, IOException, OWLOntologyStorageException { + + /* + * Build test ontology + * */ + + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = manager.getOWLDataFactory(); + OWLOntology ontology = manager.createOntology(); + + OWLClass classA = factory.getOWLClass(getEntityIRI("A")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classA)); + OWLClass classB = factory.getOWLClass(getEntityIRI("B")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classB)); +// OWLClass classC = factory.getOWLClass(getEntityIRI("C")); +// manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classC)); +// OWLClass classD = factory.getOWLClass(getEntityIRI("D")); +// manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(classD)); + OWLNamedIndividual a = factory.getOWLNamedIndividual(getEntityIRI("a")); + OWLNamedIndividual b = factory.getOWLNamedIndividual(getEntityIRI("b")); + OWLNamedIndividual c = factory.getOWLNamedIndividual(getEntityIRI("c")); +// OWLNamedIndividual d = factory.getOWLNamedIndividual(getEntityIRI("d")); + OWLObjectProperty roleR = factory.getOWLObjectProperty(IRI.create(String.format(NS, "R"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(roleR)); +// OWLObjectProperty roleF = factory.getOWLObjectProperty(IRI.create(String.format(NS, "F"))); +// manager.addAxiom(ontology, 
factory.getOWLDeclarationAxiom(roleF)); + OWLObjectProperty roleP = factory.getOWLObjectProperty(IRI.create(String.format(NS, "P"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(roleP)); +// OWLObjectProperty roleL = factory.getOWLObjectProperty(IRI.create(String.format(NS, "L"))); +// manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(roleL)); + + // Class assertions + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(classA, a)); // A(a) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(classA, b)); // A(b) + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(roleP, c, a)); // P(c,a) + + // Axioms + // subsetOf(A someValuesFrom(R owl:Thing)) + manager.addAxiom(ontology, + factory.getOWLSubClassOfAxiom(classA, + factory.getOWLObjectSomeValuesFrom(roleR, + factory.getOWLThing()))); + + // inverseFunctional(R) + manager.addAxiom(ontology, + factory.getOWLInverseFunctionalObjectPropertyAxiom(roleR)); + + // subsetOf(someValuesFrom(inverseOf(P) owl:thing) B) + manager.addAxiom(ontology, + factory.getOWLSubClassOfAxiom(factory.getOWLObjectSomeValuesFrom(roleP.getInverseProperty(), + factory.getOWLThing()), + classB)); + /* + * Save the ontology + * */ + + manager.saveOntology(ontology, Files.newOutputStream(Paths.get("/home/alessandro/Desktop/test-ontology.owl"))); + + /* + * Test one query + * */ + + QueryReasoner pagoda = QueryReasoner.getInstance(ontology); + pagoda.loadOntology(ontology); + if(pagoda.preprocess()) { + String queryStr = "PREFIX rdf: " + + "select distinct ?x" + + " where { " + + " ?x rdf:type " + classB + + " }"; + QueryRecord queryRecord = pagoda.getQueryManager().create(queryStr); + System.out.println(queryRecord); + pagoda.evaluate(queryRecord); + AnswerTuples answers = queryRecord.getAnswers(); + System.out.println("Difficulty: " + queryRecord.getDifficulty()); + int count = 0; + for(AnswerTuple ans; answers.isValid(); answers.moveNext()) { + ans = answers.getTuple(); + 
TestUtil.logInfo(ans); + count++; + } +// Assert.assertEquals(count, 1); + } + pagoda.dispose(); + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestGapMappedToLower.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestGapMappedToLower.java new file mode 100644 index 0000000..4b1ec6d --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestGapMappedToLower.java @@ -0,0 +1,68 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import junit.framework.Assert; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.*; +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.query.AnswerTuple; +import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; +import uk.ac.ox.cs.pagoda.util.Namespace; + +public class TestGapMappedToLower { + + public static final String ns = "http://example.org/test#%s"; + + public IRI getEntityIRI(String name) { + return IRI.create(String.format(ns, name)); + } + + @Test + public void test() throws OWLOntologyCreationException { + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = manager.getOWLDataFactory(); + OWLOntology ontology = manager.createOntology(); + OWLClass A = factory.getOWLClass(getEntityIRI("A")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(A)); + OWLClass B = factory.getOWLClass(getEntityIRI("B")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(B)); + OWLClass C = factory.getOWLClass(getEntityIRI("C")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(C)); + OWLClass A1 = factory.getOWLClass(getEntityIRI("A1")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(A1)); + OWLClass A2 = factory.getOWLClass(getEntityIRI("A2")); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(A2)); + OWLNamedIndividual a = factory.getOWLNamedIndividual(getEntityIRI("a")); + OWLNamedIndividual b = 
factory.getOWLNamedIndividual(getEntityIRI("b")); + OWLNamedIndividual c = factory.getOWLNamedIndividual(getEntityIRI("c")); + OWLObjectProperty r = factory.getOWLObjectProperty(IRI.create(String.format(ns, "r"))); + manager.addAxiom(ontology, factory.getOWLDeclarationAxiom(r)); + + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(A, a)); // A(a) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(B, b)); // B(b) + manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(C, c)); // C(c) + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(r, a, b)); // r(a,b) + manager.addAxiom(ontology, factory.getOWLObjectPropertyAssertionAxiom(r, a, c)); // r(a,c) + manager.addAxiom(ontology, factory.getOWLSubClassOfAxiom(A, factory.getOWLObjectUnionOf(A1, A2))); // A \sqsubseteq A1 \sqcup A2 + manager.addAxiom(ontology, factory.getOWLSubClassOfAxiom(A1, factory.getOWLObjectMaxCardinality(1, r))); // A1 \sqsubseteq \leq 1 r.\top + manager.addAxiom(ontology, factory.getOWLSubClassOfAxiom(A2, factory.getOWLObjectMaxCardinality(1, r))); // A2 \sqsubseteq \leq 1 r.\top + + QueryReasoner pagoda = QueryReasoner.getInstance(ontology); + pagoda.loadOntology(ontology); + if (pagoda.preprocess()) { + String sparql = "select ?x where { " + + "?x <" + r.toStringID() + "> ?y . " + + "?y " + Namespace.RDF_TYPE_QUOTED + " <" + B.toStringID() + "> . " + + "?y " + Namespace.RDF_TYPE_QUOTED + " <" + C.toStringID() + "> . 
} "; + AnswerTuples rs = pagoda.evaluate(sparql); + int count = 0; + for (AnswerTuple ans; rs.isValid(); rs.moveNext()) { + ans = rs.getTuple(); + System.out.println(ans.getGroundTerm(0)); + ++count; + } + Assert.assertEquals(1, count); + } + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaDBPedia.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaDBPedia.java new file mode 100644 index 0000000..3642147 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaDBPedia.java @@ -0,0 +1,33 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.Pagoda; +import uk.ac.ox.cs.pagoda.util.TestUtil; + +import java.io.IOException; +import java.nio.file.Paths; + +public class TestPagodaDBPedia { + + public static final String ANSWER_PATH = "~/TestPagodaDEBPedia.json"; + + @Test + public void just_execute() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + Pagoda pagoda = Pagoda.builder() + .ontology(Paths.get(ontoDir, "dbpedia/integratedOntology-all-in-one-minus-datatype.owl")) + .data(Paths.get(ontoDir, "dbpedia/data/dbpedia-minus-datatype-new.ttl")) + .query(Paths.get(ontoDir, "dbpedia/atomic.sparql")) + .answer(ANSWER_PATH) + .classify(true) + .hermit(true) + .build(); + pagoda.run(); + } + + @Test + public void answersCorrectness() { + // TODO implement + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaFLY.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaFLY.java new file mode 100644 index 0000000..3eb956f --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaFLY.java @@ -0,0 +1,107 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.Pagoda; +import uk.ac.ox.cs.pagoda.query.CheckAnswers; +import uk.ac.ox.cs.pagoda.util.PagodaProperties; +import uk.ac.ox.cs.pagoda.util.TestUtil; +import 
uk.ac.ox.cs.pagoda.util.Timer; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; + +public class TestPagodaFLY { + + // @Test(groups = {"light"}) + public void answersCorrectness_withGJFC() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + Path answers = Paths.get(File.createTempFile("answers", ".json").getAbsolutePath()); + new File(answers.toString()).deleteOnExit(); + Path givenAnswers = TestUtil.getAnswersFilePath("answers/pagoda-fly-with-GJ-FC-individuals.json"); + + Pagoda pagoda = Pagoda.builder() + .ontology(Paths.get(ontoDir, "fly/fly_anatomy_XP_with_GJ_FC_individuals.owl")) + .query(Paths.get(ontoDir, "fly/queries/fly.sparql")) + .answer(answers) + .classify(false) + .build(); + + pagoda.run(); + CheckAnswers.assertSameAnswers(answers, givenAnswers); + } + + @Test(groups = {"light", "correctness"}) + public void answersCorrectness_rolledUp() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + Path answers = Paths.get(File.createTempFile("answers", ".json").getAbsolutePath()); + new File(answers.toString()).deleteOnExit(); + Path givenAnswers = TestUtil.getAnswersFilePath("answers/pagoda-fly-rolledup.json"); + + Pagoda pagoda = Pagoda.builder() + .ontology(Paths.get(ontoDir, "fly/fly_rolledUp.owl")) + .query(Paths.get(ontoDir, "fly/queries/fly_rolledUp.sparql")) + .answer(answers) +// .answer(Paths.get("/home/alessandro/Desktop/answers.json")) + .classify(false) + .build(); + + pagoda.run(); + CheckAnswers.assertSameAnswers(answers, givenAnswers); + } + + @Test(groups = {"light", "justExecute", "nonOriginal", "existential"}) + public void justExecute_newQueries() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + + Pagoda.builder() +// .ontology(Paths.get(ontoDir, "fly/fly_rolledUp.owl")) + .ontology(Paths.get(ontoDir, "fly/fly_anatomy_XP_with_GJ_FC_individuals.owl")) +// 
.query(Paths.get(ontoDir, "fly/queries/fly_rolledUp.sparql")) +// .query(Paths.get(ontoDir, "fly/queries/new_queries.sparql")) + .query("/home/alessandro/Desktop/query-8.sparql") +// .answer(Paths.get("/home/alessandro/Desktop/answers.json")) + .classify(false) + .hermit(true) + .skolemDepth(3) + .skolem(PagodaProperties.SkolemUpperBoundOptions.BEFORE_SUMMARISATION) + .build() + .run(); + } + + @Test(groups = {"comparison"}) + public void compare_newQueries() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + + Timer timer = new Timer(); + Pagoda.builder() + .ontology(Paths.get(ontoDir, "fly/fly_rolledUp.owl")) + .query(Paths.get(ontoDir, "fly/queries/new_queries.sparql")) + .classify(false) + .hermit(true) + .skolem(PagodaProperties.SkolemUpperBoundOptions.AFTER_SUMMARISATION) // <----<< Skolem upper bound is ENABLED <<< + .build() + .run(); + double t1 = timer.duration(); + + timer.reset(); + + Pagoda.builder() + .ontology(Paths.get(ontoDir, "fly/fly_rolledUp.owl")) + .query(Paths.get(ontoDir, "fly/queries/new_queries.sparql")) + .classify(false) + .hermit(true) + .skolem(PagodaProperties.SkolemUpperBoundOptions.DISABLED) // <----<< Skolem upper bound is DISABLED <<< + .build() + .run(); + double t2 = timer.duration(); + + if(t1 < t2) + TestUtil.logInfo( + "Overall reasoning with Skolem upper bound was " + (int) (t2 / t1 * 100 - 100) + "x faster!"); + else + TestUtil.logInfo( + "Overall reasoning with Skolem upper bound was " + (int) (t1 / t2 * 100 - 100) + "x slower..."); + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaLUBM.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaLUBM.java new file mode 100644 index 0000000..019fafa --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaLUBM.java @@ -0,0 +1,92 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.Pagoda; +import uk.ac.ox.cs.pagoda.query.CheckAnswers; 
+import uk.ac.ox.cs.pagoda.util.TestUtil; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; + +public class TestPagodaLUBM { + + public void answersCorrectness(int number) throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + Path answers = Paths.get(File.createTempFile("answers", ".json").getAbsolutePath()); + new File(answers.toString()).deleteOnExit(); + Path givenAnswers = TestUtil.getAnswersFilePath("answers/pagoda-lubm" + number + ".json"); + + Pagoda pagoda = Pagoda.builder() + .ontology(Paths.get(ontoDir, "lubm/univ-bench.owl")) + .data(Paths.get(ontoDir, "lubm/data/lubm" + number + ".ttl")) + .query(Paths.get(ontoDir, "lubm/queries/test.sparql")) + .answer(answers) + .build(); + + pagoda.run(); + CheckAnswers.assertSameAnswers(answers, givenAnswers); + } + + @Test(groups = {"light", "correctness"}) + public void answersCorrectness_1() throws IOException { + answersCorrectness(1); + } + + public void justExecute_sygenia(int number) throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); +// Path answers = Paths.get(File.createTempFile("answers", ".json").getAbsolutePath()); +// new File(answers.toString()).deleteOnExit(); +// Path givenAnswers = TestUtil.getAnswersFilePath("answers/pagoda-lubm" + number + ".json"); + + Pagoda pagoda = Pagoda.builder() + .ontology(Paths.get(ontoDir, "lubm/univ-bench.owl")) + .data(Paths.get(ontoDir, "lubm/data/lubm" + number + ".ttl")) + .query(Paths.get(ontoDir, "lubm/queries/lubm_sygenia.sparql")) +// .answer(answers) + .build(); + + pagoda.run(); +// CheckAnswers.assertSameAnswers(answers, givenAnswers); + } + + @Test(groups = {"sygenia"}) + public void justExecute_sygenia_1() throws IOException { + justExecute_sygenia(1); + } + + public void justExecute_sygenia_allBlanks(int number) throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); +// Path answers = 
Paths.get(File.createTempFile("answers", ".json").getAbsolutePath()); +// new File(answers.toString()).deleteOnExit(); +// Path givenAnswers = TestUtil.getAnswersFilePath("answers/pagoda-lubm" + number + ".json"); + + Pagoda pagoda = Pagoda.builder() + .ontology(Paths.get(ontoDir, "lubm/univ-bench.owl")) + .data(Paths.get(ontoDir, "lubm/data/lubm" + number + ".ttl")) + .query(Paths.get(ontoDir, "lubm/queries/lubm_sygenia_all-blanks.sparql")) +// .answer(answers) + .build(); + + pagoda.run(); +// CheckAnswers.assertSameAnswers(answers, givenAnswers); + } + + @Test(groups = {"sygenia"}) + public void justExecute_sygenia_1_allBlanks() throws IOException { + justExecute_sygenia_allBlanks(1); + } + + @Test(groups = {"existential"}) + public void justExecute_feier() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + + Pagoda.builder() + .ontology(Paths.get(ontoDir, "lubm/univ-bench.owl")) + .data(Paths.get(ontoDir, "lubm/data/lubm1.ttl")) + .query(Paths.get(ontoDir, "lubm/queries/queries_from_rules.sparql")) + .build() + .run(); + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaNPD.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaNPD.java new file mode 100644 index 0000000..939ee6e --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaNPD.java @@ -0,0 +1,38 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.Pagoda; +import uk.ac.ox.cs.pagoda.util.TestUtil; + +import java.io.IOException; +import java.nio.file.Paths; + +public class TestPagodaNPD { + + public static final String ANSWER_PATH = "~/PagodaNPDWithoutDatatype.json"; + + @Test + public void justExecuteNPDWithoutDataType() { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + Pagoda pagoda = Pagoda.builder() + .ontology(Paths.get(ontoDir, "npd/npd-all-minus-datatype.owl")) + .data(Paths.get(ontoDir, 
"npd/data/npd-data-dump-minus-datatype-new.ttl")) + .query(Paths.get(ontoDir, "npd/queries/atomic.sparql")) + .answer(ANSWER_PATH) + .classify(true) + .hermit(true) + .build(); + pagoda.run(); + } + + @Test + public void testNPDwithoutDataType() throws IOException { + // TODO implement + } + + @Test + public void testNPD() throws IOException { + // TODO implement + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaReactome.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaReactome.java new file mode 100644 index 0000000..8a8a596 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaReactome.java @@ -0,0 +1,53 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.Pagoda; +import uk.ac.ox.cs.pagoda.util.TestUtil; + +import java.io.IOException; +import java.nio.file.Paths; + +public class TestPagodaReactome { + + @Test(groups = {"justExecute"}) + public void justExecute() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + + Pagoda.builder() + .ontology(Paths.get(ontoDir, "reactome/biopax-level3-processed.owl")) + .data(Paths.get(ontoDir, "reactome/data/sample_10.ttl")) + .query(Paths.get(ontoDir, "reactome/test.sparql")) + .classify(true) + .hermit(true) + .build() + .run(); + } + + @Test(groups = {"sygenia"}) + public void justExecute_sygenia() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + + Pagoda.builder() + .ontology(Paths.get(ontoDir, "reactome/biopax-level3-processed.owl")) + .data(Paths.get(ontoDir, "reactome/data/sample_10.ttl")) + .query(Paths.get(ontoDir, "reactome/reactome_sygenia_queries.sparql")) + .classify(true) + .hermit(true) + .build() + .run(); + } + + @Test(groups = {"existential"}) + public void justExecute_existential() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + + Pagoda.builder() + .ontology(Paths.get(ontoDir, 
"reactome/biopax-level3-processed.owl")) + .data(Paths.get(ontoDir, "reactome/data/sample_10.ttl")) + .query(Paths.get(ontoDir, "reactome/existential_queries.sparql")) +// .query(Paths.get(ontoDir, "reactome/Queries_by_Feier-et-al.sparql")) + .build() + .run(); + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaUOBM.java b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaUOBM.java new file mode 100644 index 0000000..4dae223 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/global_tests/TestPagodaUOBM.java @@ -0,0 +1,122 @@ +package uk.ac.ox.cs.pagoda.global_tests; + +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.Pagoda; +import uk.ac.ox.cs.pagoda.query.CheckAnswers; +import uk.ac.ox.cs.pagoda.util.TestUtil; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; + +public class TestPagodaUOBM { + + private static final int N_1 = 1; + private static final int N_2 = 4; + + @DataProvider(name = "UOBMNumbers") + private static Object[][] UOBMNumbers() { + Integer[][] integers = new Integer[N_2 - N_1 + 1][1]; + for(int i = 0; i < N_2 - N_1 + 1; i++) + integers[i][0] = N_1 + i; + return integers; + } + + @Test(groups = {"light", "correctness"}) + public void answersCorrectness_1() throws IOException { + answersCorrectness(1); + } + + @Test(groups = {"heavy", "correctness"}, dataProvider = "UOBMNumbers") + public void answersCorrectness(int number) throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + Path answers = Paths.get(File.createTempFile("answers", ".json").getAbsolutePath()); + new File(answers.toString()).deleteOnExit(); + Path givenAnswers = TestUtil.getAnswersFilePath("answers/pagoda-uobm" + number + ".json"); + + Pagoda pagoda = Pagoda.builder() + .ontology(Paths.get(ontoDir, "uobm/univ-bench-dl.owl")) + .data(Paths.get(ontoDir, "uobm/data/uobm" + number + ".ttl")) + 
.query(Paths.get(ontoDir, "uobm/queries/test.sparql")) + .answer(answers) + .build(); + + pagoda.run(); + CheckAnswers.assertSameAnswers(answers, givenAnswers); + } + + @Test(groups = {"sygenia"}) + public void answersCorrectness_sygenia_1() throws IOException { + answersCorrectness_sygenia(1); + } + + @Test(groups = {"heavy",}, dataProvider = "UOBMNumbers") + public void answersCorrectness_sygenia(int number) throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); +// Path answers = Paths.get(File.createTempFile("answers", ".json").getAbsolutePath()); +// new File(answers.toString()).deleteOnExit(); +// Path givenAnswers = TestUtil.getAnswersFilePath("answers/pagoda-uobm" + number + ".json"); + + Pagoda pagoda = Pagoda.builder() + .ontology(Paths.get(ontoDir, "uobm/univ-bench-dl.owl")) + .data(Paths.get(ontoDir, "uobm/data/uobm" + number + ".ttl")) + .query(Paths.get(ontoDir, "uobm/queries/uobm_sygenia.sparql")) + .build(); + + pagoda.run(); + } + + @Test(groups = {"sygenia"}) + public void answersCorrectness_sygenia_allBlanks_1() throws IOException { + answersCorrectness_sygenia(1); + } + + @Test(groups = {"heavy"}, dataProvider = "UOBMNumbers") + public void answersCorrectness_sygenia_allBlanks(int number) throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + + Pagoda.builder() + .ontology(Paths.get(ontoDir, "uobm/univ-bench-dl.owl")) + .data(Paths.get(ontoDir, "uobm/data/uobm" + number + ".ttl")) + .query(Paths.get(ontoDir, "uobm/queries/uobm_sygenia_all-blanks.sparql")) + .build() + .run(); + } + + @Test(groups = {"justExecute", "heavy", "nonOriginal", "existential"}) + public void justExecute_modifiedUOBM() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + + Pagoda.builder() + .ontology(Paths.get(ontoDir, "uobm_modified/univ-bench-dl-modified.owl")) + .data(Paths.get(ontoDir, "uobm_modified/data/uobm1.ttl")) + .query(Paths.get(ontoDir, 
"uobm_modified/queries/additional_queries.sparql")) + .build() + .run(); + } + + @Test(groups = {"justExecute"}) + public void justExecute_additionalQueries() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + + Pagoda.builder() + .ontology(Paths.get(ontoDir, "uobm/univ-bench-dl.owl")) + .data(Paths.get(ontoDir, "uobm/data/uobm1.ttl")) + .query(Paths.get(ontoDir, "uobm/queries/additional_queries.sparql")) + .build() + .run(); + } + + @Test(groups = {"justExecute", "UOBM50"}) + public void justExecuteUOBM_50() throws IOException { + String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); + Pagoda.builder() + .ontology(Paths.get(ontoDir, "uobm/univ-bench-dl.owl")) + .data(Paths.get(ontoDir, "uobm/data/uobm50.ttl")) + .query(Paths.get(ontoDir, "uobm/queries/test.sparql")) + .build() + .run(); + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/hermit/TestRuleHelper.java b/src/test/java/uk/ac/ox/cs/pagoda/hermit/TestRuleHelper.java new file mode 100644 index 0000000..3a2cad8 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/hermit/TestRuleHelper.java @@ -0,0 +1,319 @@ +package uk.ac.ox.cs.pagoda.hermit; + +import org.testng.annotations.Test; +import uk.ac.ox.cs.pagoda.MyPrefixes; +import uk.ac.ox.cs.pagoda.owl.OWLHelper; +import uk.ac.ox.cs.pagoda.rules.DatalogProgram; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.nio.charset.Charset; + +public class TestRuleHelper { + + @Test + public static void someTest() { + String prefixes = "PREFIX anony: \n" + + "PREFIX aux: \n" + + "PREFIX owl: \n" + + "PREFIX prefix0: \n" + + "PREFIX rdf: \n" + + "PREFIX rdfs: \n" + + "PREFIX ruleml: \n" + + "PREFIX swrl: \n" + + "PREFIX swrlb: \n" + + "PREFIX swrlx: \n" + + "PREFIX xsd: \n"; + String rule = "owl:Nothing(?X) :- owl:Nothing2(?X).\n" + + "prefix0:WomanCollege(?X) :- prefix0:College(?X).\n" + + "owl:Nothing5(?X) :- prefix0:WomanCollege(?X), prefix0:hasStudent(?X,?Y), prefix0:Man(?Y).\n" + + 
"prefix0:SportsFan(?X) :- prefix0:Person(?X), prefix0:isCrazyAbout(?X,?Y), prefix0:Sports(?Y).\n" + + "prefix0:Organization(?X) :- prefix0:isAffiliateOf(?X,?Y).\n" + + "prefix0:Person(?X) :- prefix0:lastName(?X,?Y).\n" + + "owl:sameAs(?Y1,?Y2) :- prefix0:isHeadOf(?Y1,?X), prefix0:isHeadOf(?Y2,?X).\n" + + "prefix0:isMemberOf(?Y,?X) :- prefix0:hasMember(?X,?Y).\n" + + "prefix0:Person(?X) :- prefix0:hasMasterDegreeFrom(?X,?Y).\n" + + "prefix0:Person(?X) :- prefix0:TeachingAssistant(?X).\n" + + "prefix0:Employee(?X) :- prefix0:Person(?X), prefix0:worksFor(?X,?Y), prefix0:Organization(?Y).\n" + + "prefix0:FineArts(?X) :- prefix0:Media_Arts_And_ciencesClass(?X).\n" + + "prefix0:FineArts(?X) :- prefix0:Medieval_ArtsClass(?X).\n" + + "prefix0:Professor(?X) :- prefix0:Chair(?X).\n" + + "prefix0:Faculty(?Y) :- prefix0:isTaughtBy(?X,?Y).\n" + + "prefix0:worksFor(?X,anony:individual25) :- prefix0:Employee(?X).\n" + + "prefix0:Engineering(?X) :- prefix0:Chemical_EngineeringClass(?X).\n" + + "prefix0:BaseballClass(anony:individual1) :- prefix0:BaseballLover(?X).\n" + + "prefix0:Course(?Y) :- prefix0:listedCourse(?X,?Y).\n" + + "prefix0:worksFor(?X,?Y) :- prefix0:isHeadOf(?X,?Y).\n" + + "prefix0:Faculty(?X) :- prefix0:teacherOf(?X,?Y).\n" + + "prefix0:Course(?Y) :- prefix0:teachingAssistantOf(?X,?Y).\n" + + "owl:Nothing(?X) :- owl:Nothing1(?X).\n" + + "prefix0:Person(?X) :- prefix0:Student(?X).\n" + + "prefix0:isFriendOf(?Y,?X) :- prefix0:isFriendOf(?X,?Y).\n" + + "prefix0:Student(?X) :- prefix0:UndergraduateStudent(?X).\n" + + "prefix0:Person(?X) :- prefix0:BasketBallLover(?X).\n" + + "prefix0:Person(?Y) :- prefix0:hasSameHomeTownWith(?X,?Y).\n" + + "prefix0:Employee(?X) :- prefix0:Faculty(?X).\n" + + "prefix0:Insterest(?X) :- prefix0:Music(?X).\n" + + "owl:Nothing1(?X) :- prefix0:NonScienceStudent(?X), prefix0:hasMajor(?X,?Y), prefix0:Science(?Y).\n" + + "owl:sameAs(?Y1,?Y2) :- prefix0:isTaughtBy(?X,?Y1), prefix0:isTaughtBy(?X,?Y2).\n" + + "prefix0:hasStudent(?Y,?X) :- 
prefix0:isStudentOf(?X,?Y).\n" + + "prefix0:Student(?X) :- prefix0:ScienceStudent(?X).\n" + + "prefix0:like(?X,anony:individual7) :- prefix0:PeopleWithHobby(?X).\n" + + "prefix0:Publication(?X) :- prefix0:Article(?X).\n" + + "prefix0:isTaughtBy(?Y,?X) :- prefix0:teacherOf(?X,?Y).\n" + + "prefix0:isCrazyAbout(?X,anony:individual19) :- prefix0:TennisFan(?X).\n" + + "prefix0:Science(?X) :- prefix0:Marine_ScienceClass(?X).\n" + + "prefix0:SupportingStaff(?X) :- prefix0:SystemsStaff(?X).\n" + + "prefix0:hasStudent(?X,anony:individual10) :- prefix0:College(?X).\n" + + "prefix0:FineArts(?X) :- prefix0:Theatre_and_DanceClass(?X).\n" + + "prefix0:Organization(?Y) :- prefix0:subOrganizationOf(?X,?Y).\n" + + "prefix0:Engineering(?X) :- prefix0:Materical_Science_EngineeringClass(?X).\n" + + "prefix0:hasMember(?Y,?X) :- prefix0:isMemberOf(?X,?Y).\n" + + "prefix0:Student(?Y) :- prefix0:hasStudent(?X,?Y).\n" + + "owl:Nothing(?X) :- owl:Nothing5(?X).\n" + + "prefix0:isCrazyAbout(?X,anony:individual18) :- prefix0:SwimmingFan(?X).\n" + + "prefix0:Publication(?Y) :- prefix0:orgPublication(?X,?Y).\n" + + "prefix0:Chair(?X) :- prefix0:Person(?X), prefix0:isHeadOf(?X,?Y), prefix0:Department(?Y).\n" + + "prefix0:isStudentOf(?Y,?X) :- prefix0:hasStudent(?X,?Y).\n" + + "prefix0:Person(?X) :- prefix0:isAdvisedBy(?X,?Y).\n" + + "prefix0:hasMajor(?X,anony:individual2) :- prefix0:Student(?X).\n" + + "prefix0:Publication(?X) :- prefix0:publicationDate(?X,?Y).\n" + + "owl:Nothing(?X) :- owl:Nothing6(?X).\n" + + "prefix0:Director(?X) :- prefix0:Person(?X), prefix0:isHeadOf(?X,?Y), prefix0:Program(?Y).\n" + + "prefix0:Professor(?X) :- prefix0:AssociateProfessor(?X).\n" + + "prefix0:TeachingAssistant(?X) :- prefix0:Person(?X), prefix0:teachingAssistantOf(?X,?Y), prefix0:Course(?Y).\n" + + "prefix0:Research(?Y) :- prefix0:researchProject(?X,?Y).\n" + + "prefix0:TeachingAssistant(?X) :- prefix0:teachingAssistantOf(?X,?Y).\n" + + "prefix0:University(?X) :- prefix0:hasAlumnus(?X,?Y).\n" + + 
"prefix0:Faculty(?X) :- prefix0:Lecturer(?X).\n" + + "prefix0:isHeadOf(?X,anony:individual21) :- prefix0:Dean(?X).\n" + + "prefix0:like(?X,?Y) :- prefix0:isCrazyAbout(?X,?Y).\n" + + "prefix0:Person(?X) :- prefix0:hasSameHomeTownWith(?X,?Y).\n" + + "prefix0:Organization(?X) :- prefix0:orgPublication(?X,?Y).\n" + + "prefix0:Person(?Y) :- prefix0:hasMember(?X,?Y).\n" + + "prefix0:HumanitiesAndSocial(?X) :- prefix0:LinguisticsClass(?X).\n" + + "prefix0:Engineering(?X) :- prefix0:Aeronautical_EngineeringClass(?X).\n" + + "owl:Nothing(?X) :- owl:Nothing3(?X).\n" + + "prefix0:isHeadOf(?X,anony:individual12) :- prefix0:Director(?X).\n" + + "prefix0:Publication(?X) :- prefix0:publicationAuthor(?X,?Y).\n" + + "prefix0:Organization(?X) :- prefix0:College(?X).\n" + + "prefix0:isCrazyAbout(?X,anony:individual24) :- prefix0:BaseballFan(?X).\n" + + "prefix0:FineArts(?X) :- prefix0:Performing_ArtsClass(?X).\n" + + "prefix0:Sports(?X) :- prefix0:BasketBallClass(?X).\n" + + "prefix0:HumanitiesAndSocial(?X) :- prefix0:PsychologyClass(?X).\n" + + "prefix0:Person(?X) :- prefix0:Employee(?X).\n" + + "prefix0:FineArts(?X) :- prefix0:DramaClass(?X).\n" + + "prefix0:Faculty(?X) :- prefix0:PostDoc(?X).\n" + + "prefix0:University(?Y) :- prefix0:hasDoctoralDegreeFrom(?X,?Y).\n" + + "prefix0:Sports(anony:individual0) :- prefix0:SportsLover(?X).\n" + + "prefix0:Person(?Y) :- prefix0:hasAlumnus(?X,?Y).\n" + + "prefix0:FineArts(?X) :- prefix0:Modern_ArtsClass(?X).\n" + + "prefix0:hasMember(?X,?Y) :- prefix0:hasStudent(?X,?Y).\n" + + "prefix0:Course(?X) :- prefix0:isTaughtBy(?X,?Y).\n" + + "prefix0:NonScienceStudent(?X) :- prefix0:Student(?X).\n" + + "prefix0:BasketBallLover(?X) :- prefix0:Person(?X), prefix0:like(?X,?Y), prefix0:BasketBallClass(?Y).\n" + + "prefix0:Professor(?X) :- prefix0:VisitingProfessor(?X).\n" + + "prefix0:GraduateCourse(?Y) :- prefix0:GraduateStudent(?X), prefix0:takesCourse(?X,?Y).\n" + + "prefix0:TennisClass(anony:individual19) :- prefix0:TennisFan(?X).\n" + + 
"prefix0:Work(?X) :- prefix0:Research(?X).\n" + + "prefix0:TennisFan(?X) :- prefix0:Person(?X), prefix0:isCrazyAbout(?X,?Y), prefix0:TennisClass(?Y).\n" + + "prefix0:ScienceStudent(?X) :- prefix0:Student(?X), prefix0:hasMajor(?X,?Y), prefix0:Science(?Y).\n" + + "prefix0:Person(?X) :- prefix0:Woman(?X).\n" + + "prefix0:Man(?X) :- prefix0:Person(?X).\n" + + "prefix0:Person(?X) :- prefix0:hasUndergraduateDegreeFrom(?X,?Y).\n" + + "prefix0:ResearchGroup(?X) :- prefix0:researchProject(?X,?Y).\n" + + "prefix0:hasSameHomeTownWith(?X,?Z) :- prefix0:hasSameHomeTownWith(?X,?Y), prefix0:hasSameHomeTownWith(?Y,?Z).\n" + + "prefix0:Person(?X) :- prefix0:isFriendOf(?X,?Y).\n" + + "prefix0:Person(?Y) :- prefix0:publicationAuthor(?X,?Y).\n" + + "prefix0:Person(?X) :- prefix0:Chair(?X).\n" + + "prefix0:Publication(?X) :- prefix0:Manual(?X).\n" + + "prefix0:Publication(?X) :- prefix0:UnofficialPublication(?X).\n" + + "prefix0:Engineering(?X) :- prefix0:Industry_EngineeringClass(?X).\n" + + "prefix0:Science(?X) :- prefix0:StatisticsClass(?X).\n" + + "prefix0:Organization(?Y) :- prefix0:isStudentOf(?X,?Y).\n" + + "prefix0:SwimmingFan(?X) :- prefix0:Person(?X), prefix0:isCrazyAbout(?X,?Y), prefix0:SwimmingClass(?Y).\n" + + "prefix0:Person(?X) :- prefix0:emailAddress(?X,?Y).\n" + + "prefix0:FineArts(?X) :- prefix0:Latin_ArtsClass(?X).\n" + + "prefix0:Organization(?X) :- prefix0:ResearchGroup(?X).\n" + + "prefix0:AcademicSubject(?X) :- prefix0:HumanitiesAndSocial(?X).\n" + + "prefix0:Professor(?X) :- prefix0:Dean(?X).\n" + + "prefix0:SwimmingClass(anony:individual8) :- prefix0:SwimmingLover(?X).\n" + + "prefix0:University(?Y) :- prefix0:hasMasterDegreeFrom(?X,?Y).\n" + + "prefix0:Article(?X) :- prefix0:ConferencePaper(?X).\n" + + "prefix0:Person(?X) :- prefix0:BasketBallFan(?X).\n" + + "prefix0:HumanitiesAndSocial(?X) :- prefix0:ReligionsClass(?X).\n" + + "prefix0:Science(?X) :- prefix0:PhysicsClass(?X).\n" + + "prefix0:Dean(?X) :- prefix0:isHeadOf(?X,?Y), prefix0:College(?Y).\n" + + 
"prefix0:University(?Y) :- prefix0:hasDegreeFrom(?X,?Y).\n" + + "prefix0:Organization(?X) :- prefix0:hasMember(?X,?Y).\n" + + "prefix0:Engineering(?X) :- prefix0:Computer_EngineeringClass(?X).\n" + + "prefix0:Publication(?X) :- prefix0:Software(?X).\n" + + "prefix0:Science(?X) :- prefix0:GeosciencesClass(?X).\n" + + "prefix0:hasMajor(?X,anony:individual9) :- prefix0:ScienceStudent(?X).\n" + + "prefix0:hasDegreeFrom(?Y,?X) :- prefix0:hasAlumnus(?X,?Y).\n" + + "prefix0:Software(?X) :- prefix0:softwareDocumentation(?X,?Y).\n" + + "prefix0:isMemberOf(?X,?Y) :- prefix0:isStudentOf(?X,?Y).\n" + + "prefix0:Organization(?X) :- prefix0:hasStudent(?X,?Y).\n" + + "owl:Nothing2(?X) :- prefix0:GraduateCourse(?X), prefix0:GraduateCourse_neg(?X).\n" + + "prefix0:BaseballFan(?X) :- prefix0:Person(?X), prefix0:isCrazyAbout(?X,?Y), prefix0:BaseballClass(?Y).\n" + + "prefix0:Publication(?X) :- prefix0:publicationResearch(?X,?Y).\n" + + "prefix0:like(?X,anony:individual14) :- prefix0:PeopleWithManyHobbies(?X).\n" + + "prefix0:SportsLover(?X) :- prefix0:Person(?X), prefix0:like(?X,?Y), prefix0:Sports(?Y).\n" + + "prefix0:Organization(?X) :- prefix0:University(?X).\n" + + "prefix0:hasAlumnus(?Y,?X) :- prefix0:hasDegreeFrom(?X,?Y).\n" + + "prefix0:Science(?X) :- prefix0:Materials_ScienceClass(?X).\n" + + "prefix0:Professor(?X) :- prefix0:tenured(?X,?Y).\n" + + "prefix0:Faculty(?X) :- prefix0:Professor(?X).\n" + + "prefix0:Student(?X) :- prefix0:NonScienceStudent(?X).\n" + + "prefix0:Person(?X) :- prefix0:telephone(?X,?Y).\n" + + "prefix0:FineArts(?X) :- prefix0:ArchitectureClass(?X).\n" + + "prefix0:University(?Y) :- prefix0:hasUndergraduateDegreeFrom(?X,?Y).\n" + + "prefix0:Man(anony:individual10) :- prefix0:College(?X).\n" + + "prefix0:Person(?X) :- prefix0:Man(?X).\n" + + "prefix0:Person(?X) :- prefix0:title(?X,?Y).\n" + + "prefix0:subOrganizationOf(?X,?Z) :- prefix0:subOrganizationOf(?X,?Y), prefix0:subOrganizationOf(?Y,?Z).\n" + + "owl:sameAs(?Y1,?Y2) :- prefix0:like(?X,?Y1), 
prefix0:like(?X,?Y2).\n" + + "prefix0:takesCourse(?X,anony:individual4) :- prefix0:GraduateStudent(?X).\n" + + "prefix0:Sports(?X) :- prefix0:TennisClass(?X).\n" + + "prefix0:Engineering(?X) :- prefix0:Petroleuml_EngineeringClass(?X).\n" + + "prefix0:Organization(?X) :- prefix0:Institute(?X).\n" + + "prefix0:isCrazyAbout(?X,anony:individual16) :- prefix0:BasketBallFan(?X).\n" + + "prefix0:Science(?X) :- prefix0:BiologyClass(?X).\n" + + "prefix0:Person(?X) :- prefix0:SportsFan(?X).\n" + + "prefix0:Course(?X) :- prefix0:GraduateCourse(?X).\n" + + "prefix0:Person(?X) :- prefix0:Director(?X).\n" + + "prefix0:HumanitiesAndSocial(?X) :- prefix0:EconomicsClass(?X).\n" + + "prefix0:Person(?X) :- prefix0:BaseballLover(?X).\n" + + "prefix0:HumanitiesAndSocial(?X) :- prefix0:HistoryClass(?X).\n" + + "prefix0:FineArts(?X) :- prefix0:Asian_ArtsClass(?X).\n" + + "prefix0:isStudentOf(?X,?Y) :- prefix0:enrollIn(?X,?Y).\n" + + "prefix0:isHeadOf(?X,anony:individual20) :- prefix0:Chair(?X).\n" + + "prefix0:Person(?X) :- prefix0:PeopleWithHobby(?X).\n" + + "prefix0:Sports(anony:individual5) :- prefix0:SportsFan(?X).\n" + + "prefix0:Science(anony:individual9) :- prefix0:ScienceStudent(?X).\n" + + "prefix0:Engineering(?X) :- prefix0:Biomedical_EngineeringClass(?X).\n" + + "prefix0:HumanitiesAndSocial(?X) :- prefix0:Modern_LanguagesClass(?X).\n" + + "prefix0:like(?X,?Y) :- prefix0:love(?X,?Y).\n" + + "prefix0:hasStudent(?X,anony:individual11) :- prefix0:College(?X).\n" + + "prefix0:Science(?X) :- prefix0:ChemistryClass(?X).\n" + + "prefix0:Student(?X) :- prefix0:takesCourse(?X,?Y).\n" + + "prefix0:teacherOf(?Y,?X) :- prefix0:isTaughtBy(?X,?Y).\n" + + "prefix0:HumanitiesAndSocial(?X) :- prefix0:AnthropologyClass(?X).\n" + + "prefix0:Person(?X) :- prefix0:hasDegreeFrom(?X,?Y).\n" + + "prefix0:Person(?X) :- prefix0:hasDoctoralDegreeFrom(?X,?Y).\n" + + "prefix0:Engineering(?X) :- prefix0:Electrical_EngineeringClass(?X).\n" + + "owl:differentFrom(anony:individual13,anony:individual14) :- 
prefix0:PeopleWithManyHobbies(?X).\n" + + "prefix0:Person(?X) :- prefix0:SportsLover(?X).\n" + + "prefix0:Organization(?X) :- prefix0:subOrganizationOf(?X,?Y).\n" + + "prefix0:SwimmingLover(?X) :- prefix0:Person(?X), prefix0:like(?X,?Y), prefix0:SwimmingClass(?Y).\n" + + "prefix0:BaseballLover(?X) :- prefix0:Person(?X), prefix0:like(?X,?Y), prefix0:BaseballClass(?Y).\n" + + "prefix0:Science(?X) :- prefix0:Computer_ScienceClass(?X).\n" + + "prefix0:Sports(?X) :- prefix0:SwimmingClass(?X).\n" + + "prefix0:Science(?X) :- prefix0:AstronomyClass(?X).\n" + + "prefix0:Work(?X) :- prefix0:Course(?X).\n" + + "prefix0:Science(?X) :- prefix0:MathematicsClass(?X).\n" + + "prefix0:AcademicSubject(?X) :- prefix0:Engineering(?X).\n" + + "prefix0:hasDegreeFrom(?X,?Y) :- prefix0:hasUndergraduateDegreeFrom(?X,?Y).\n" + + "prefix0:like(?X,anony:individual15) :- prefix0:PeopleWithManyHobbies(?X).\n" + + "prefix0:Sports(?X) :- prefix0:BaseballClass(?X).\n" + + "prefix0:Student(?X) :- prefix0:isStudentOf(?X,?Y).\n" + + "prefix0:Professor(?Y) :- prefix0:isAdvisedBy(?X,?Y).\n" + + "prefix0:SwimmingClass(anony:individual18) :- prefix0:SwimmingFan(?X).\n" + + "prefix0:like(?X,anony:individual22) :- prefix0:BasketBallLover(?X).\n" + + "prefix0:like(?X,anony:individual1) :- prefix0:BaseballLover(?X).\n" + + "prefix0:Schedule(?X) :- prefix0:listedCourse(?X,?Y).\n" + + "owl:Nothing6(?X) :- owl:differentFrom(?X,?X).\n" + + "prefix0:PeopleWithManyHobbies(?X) :- prefix0:like(?X,?Y3).\n" + + "prefix0:Course(anony:individual23) :- prefix0:TeachingAssistant(?X).\n" + + "prefix0:takesCourse(?X,anony:individual3) :- prefix0:takesCourse(?X,?Y).\n" + + "prefix0:love(?X,?Y) :- prefix0:like(?X,?Y).\n" + + "prefix0:AcademicSubject(?X) :- prefix0:Science(?X).\n" + + "prefix0:Person(?X) :- prefix0:ResearchAssistant(?X).\n" + + "prefix0:Insterest(?X) :- prefix0:Sports(?X).\n" + + "prefix0:Article(?X) :- prefix0:TechnicalReport(?X).\n" + + "prefix0:UndergraduateStudent(?Y) :- prefix0:WomanCollege(?X), 
prefix0:hasStudent(?X,?Y).\n" + + "prefix0:Department(anony:individual20) :- prefix0:Chair(?X).\n" + + "prefix0:Woman(?X) :- prefix0:Person(?X).\n" + + "owl:Nothing4(?X) :- prefix0:UndergraduateStudent(?X), prefix0:UndergraduateStudent_neg(?X).\n" + + "prefix0:HumanitiesAndSocial(?X) :- prefix0:HumanitiesClass(?X).\n" + + "prefix0:GraduateCourse_neg(anony:individual3) :- prefix0:takesCourse(?X,?Y).\n" + + "prefix0:Organization(?Y) :- prefix0:isAffiliatedOrganizationOf(?X,?Y).\n" + + "owl:Nothing(?X) :- owl:Nothing4(?X).\n" + + "prefix0:like(?X,anony:individual0) :- prefix0:SportsLover(?X).\n" + + "prefix0:Research(?Y) :- prefix0:publicationResearch(?X,?Y).\n" + + "prefix0:Professor(?X) :- prefix0:AssistantProfessor(?X).\n" + + "prefix0:Program(anony:individual12) :- prefix0:Director(?X).\n" + + "prefix0:isMemberOf(?X,?Y) :- prefix0:worksFor(?X,?Y).\n" + + "prefix0:Organization(anony:individual25) :- prefix0:Employee(?X).\n" + + "prefix0:hasDegreeFrom(?X,?Y) :- prefix0:hasDoctoralDegreeFrom(?X,?Y).\n" + + "prefix0:Person(?Y) :- prefix0:isAffiliateOf(?X,?Y).\n" + + "prefix0:Student(?X) :- prefix0:Person(?X), prefix0:isStudentOf(?X,?Y), prefix0:Organization(?Y).\n" + + "prefix0:PeopleWithHobby(?X) :- prefix0:Person(?X), prefix0:like(?X,?Y).\n" + + "prefix0:Organization(anony:individual17) :- prefix0:Student(?X).\n" + + "prefix0:Engineering(?X) :- prefix0:Mechanical_EngineeringClass(?X).\n" + + "prefix0:Employee(?X) :- prefix0:SupportingStaff(?X).\n" + + "prefix0:Organization(?X) :- prefix0:Department(?X).\n" + + "prefix0:HumanitiesAndSocial(?X) :- prefix0:PhilosophyClass(?X).\n" + + "prefix0:College(anony:individual21) :- prefix0:Dean(?X).\n" + + "prefix0:UndergraduateStudent_neg(anony:individual11) :- prefix0:College(?X).\n" + + "owl:Nothing3(?X) :- prefix0:Man(?X), prefix0:Woman(?X).\n" + + "owl:sameAs(?Y1,?Y2) :- prefix0:takesCourse(?X,?Y1), prefix0:LeisureStudent(?X), prefix0:takesCourse(?X,?Y2).\n" + + "prefix0:Organization(?X) :- 
prefix0:isAffiliatedOrganizationOf(?X,?Y).\n" + + "prefix0:isCrazyAbout(?X,anony:individual5) :- prefix0:SportsFan(?X).\n" + + "prefix0:Software(?X) :- prefix0:softwareVersion(?X,?Y).\n" + + "prefix0:Science(anony:individual2) :- prefix0:Student(?X).\n" + + "prefix0:SupportingStaff(?X) :- prefix0:ClericalStaff(?X).\n" + + "prefix0:Person(?X) :- prefix0:SwimmingLover(?X).\n" + + "prefix0:Person(?X) :- prefix0:age(?X,?Y).\n" + + "prefix0:BasketBallClass(anony:individual22) :- prefix0:BasketBallLover(?X).\n" + + "prefix0:like(?X,anony:individual8) :- prefix0:SwimmingLover(?X).\n" + + "prefix0:Person(?X) :- prefix0:firstName(?X,?Y).\n" + + "prefix0:Department(?Y) :- prefix0:enrollIn(?X,?Y).\n" + + "prefix0:Publication(?Y) :- prefix0:softwareDocumentation(?X,?Y).\n" + + "prefix0:hasDegreeFrom(?X,?Y) :- prefix0:hasMasterDegreeFrom(?X,?Y).\n" + + "prefix0:AcademicSubject(?Y) :- prefix0:hasMajor(?X,?Y).\n" + + "prefix0:Article(?X) :- prefix0:JournalArticle(?X).\n" + + "prefix0:Organization(?X) :- prefix0:Program(?X).\n" + + "prefix0:Course(?Y) :- prefix0:teacherOf(?X,?Y).\n" + + "prefix0:AcademicSubject(?X) :- prefix0:FineArts(?X).\n" + + "prefix0:Person(?X) :- prefix0:TennisFan(?X).\n" + + "prefix0:GraduateStudent(?X) :- prefix0:takesCourse(?X,?Y).\n" + + "prefix0:BasketBallFan(?X) :- prefix0:Person(?X), prefix0:isCrazyAbout(?X,?Y), prefix0:BasketBallClass(?Y).\n" + + "prefix0:Publication(?X) :- prefix0:Specification(?X).\n" + + "prefix0:worksFor(?X,anony:individual6) :- prefix0:ResearchAssistant(?X).\n" + + "prefix0:Person(?X) :- prefix0:SwimmingFan(?X).\n" + + "prefix0:BasketBallClass(anony:individual16) :- prefix0:BasketBallFan(?X).\n" + + "prefix0:Person(?X) :- prefix0:BaseballFan(?X).\n" + + "prefix0:Person(?Y) :- prefix0:isFriendOf(?X,?Y).\n" + + "prefix0:like(?X,anony:individual13) :- prefix0:PeopleWithManyHobbies(?X).\n" + + "owl:differentFrom(anony:individual14,anony:individual15) :- prefix0:PeopleWithManyHobbies(?X).\n" + + "prefix0:hasSameHomeTownWith(?Y,?X) :- 
prefix0:hasSameHomeTownWith(?X,?Y).\n" + + "prefix0:ResearchGroup(anony:individual6) :- prefix0:ResearchAssistant(?X).\n" + + "prefix0:College(?X) :- prefix0:WomanCollege(?X).\n" + + "prefix0:BaseballClass(anony:individual24) :- prefix0:BaseballFan(?X).\n" + + "owl:differentFrom(anony:individual13,anony:individual15) :- prefix0:PeopleWithManyHobbies(?X).\n" + + "prefix0:Publication(?X) :- prefix0:Book(?X).\n" + + "prefix0:Professor(?X) :- prefix0:FullProfessor(?X).\n" + + "prefix0:Engineering(?X) :- prefix0:Civil_EngineeringClass(?X).\n" + + "prefix0:isStudentOf(?X,anony:individual17) :- prefix0:Student(?X).\n" + + "prefix0:HumanitiesAndSocial(?X) :- prefix0:EnglishClass(?X).\n" + + "prefix0:teachingAssistantOf(?X,anony:individual23) :- prefix0:TeachingAssistant(?X).\n" + + "prefix0:woman(?X) | prefix0:man(?X) :- prefix0:human(?X).\n" + + "prefix0:FineArts(?X) :- prefix0:MusicsClass(?X).\n"; + + for(String line: prefixes.split("\n")) { + String[] split = line.split(" "); + MyPrefixes.PAGOdAPrefixes.declarePrefix(split[1], OWLHelper.removeAngles(split[2])); + } + + InputStream is = new ByteArrayInputStream(rule.getBytes(Charset.defaultCharset())); + DatalogProgram datalogProgram = new DatalogProgram(is); + System.out.println(">> General <<"); + System.out.println(datalogProgram.getGeneral().toString()); + System.out.println(">> Lower <<"); + System.out.println(datalogProgram.getLower().toString()); + System.out.println(">> Upper <<"); + System.out.println(datalogProgram.getUpper().toString()); + System.out.flush(); + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/junit/ClauseTester.java b/src/test/java/uk/ac/ox/cs/pagoda/junit/ClauseTester.java new file mode 100644 index 0000000..ad4b2de --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/junit/ClauseTester.java @@ -0,0 +1,183 @@ +package uk.ac.ox.cs.pagoda.junit; + +import org.junit.Test; +import org.semanticweb.HermiT.model.*; +import org.semanticweb.owlapi.apibinding.OWLManager; +import 
org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import uk.ac.ox.cs.pagoda.approx.Clause; +import uk.ac.ox.cs.pagoda.approx.Clausifier; + +import static org.junit.Assert.fail; + +public class ClauseTester { + + public void test_clause(Atom[] headAtoms, Atom[] bodyAtoms) { + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + OWLOntology emptyOntology = null; + try { + emptyOntology = m.createOntology(); + } catch(Exception e) { + e.printStackTrace(); + fail("failed to create a new ontology"); + } + Clause c = new Clause(Clausifier.getInstance(emptyOntology), DLClause.create(headAtoms, bodyAtoms)); + System.out.println(c.toString()); + } + + @Test + public void test_nominal() { + Variable x = Variable.create("X"); + AtomicRole r = AtomicRole.create("r"); + Individual o = Individual.create("o"); + Atom[] bodyAtoms = new Atom[]{Atom.create(r, x, o)}; + AtomicConcept A = AtomicConcept.create("A"); + Atom[] headAtoms = new Atom[]{Atom.create(A, x)}; + test_clause(headAtoms, bodyAtoms); + } + + @Test + public void test_simple() { + Variable x = Variable.create("X"), y1 = Variable.create("y1"), y2 = Variable.create("y2"); + AtomicConcept A = AtomicConcept.create("A"); + AtomicRole r = AtomicRole.create("r"); + Atom[] bodyAtoms = new Atom[] { + Atom.create(A, x), + Atom.create(r, x, y1), + Atom.create(r, x, y2) + }; + + Atom[] headAtoms = new Atom[] { + Atom.create(Equality.INSTANCE, y1, y2) + }; + + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + OWLOntology emptyOntology = null; + try { + emptyOntology = m.createOntology(); + } catch (Exception e) { + e.printStackTrace(); + fail("failed to create a new ontology"); + } + Clause c = new Clause(Clausifier.getInstance(emptyOntology), DLClause.create(headAtoms, bodyAtoms)); + System.out.println(c.toString()); + } + + @Test + public void test_more() { + Variable x = Variable.create("X"), y1 = Variable.create("y1"), y2 = Variable.create("y2"), y3 = 
Variable.create("y3"); + AtomicConcept A = AtomicConcept.create("A"); + AtomicRole r = AtomicRole.create("r"); + Atom[] bodyAtoms = new Atom[] { + Atom.create(A, x), + Atom.create(r, x, y1), + Atom.create(r, x, y2), + Atom.create(r, x, y3), + }; + + Atom[] headAtoms = new Atom[] { + Atom.create(Equality.INSTANCE, y1, y2), + Atom.create(Equality.INSTANCE, y1, y3), + Atom.create(Equality.INSTANCE, y2, y3) + }; + + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + OWLOntology emptyOntology = null; + try { + emptyOntology = m.createOntology(); + } catch (Exception e) { + e.printStackTrace(); + fail("failed to create a new ontology"); + } + Clause c = new Clause(Clausifier.getInstance(emptyOntology), DLClause.create(headAtoms, bodyAtoms)); + System.out.println(c.toString()); + } + + @Test + public void test_inverse() { + Variable x = Variable.create("X"), y1 = Variable.create("y1"), y2 = Variable.create("y2"); + AtomicConcept A = AtomicConcept.create("A"); + AtomicRole r = AtomicRole.create("r"); + Atom[] bodyAtoms = new Atom[] { + Atom.create(A, x), + Atom.create(r, y1, x), + Atom.create(r, y2, x) + }; + + Atom[] headAtoms = new Atom[] { + Atom.create(Equality.INSTANCE, y1, y2) + }; + + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + OWLOntology emptyOntology = null; + try { + emptyOntology = m.createOntology(); + } catch (Exception e) { + e.printStackTrace(); + fail("failed to create a new ontology"); + } + Clause c = new Clause(Clausifier.getInstance(emptyOntology), DLClause.create(headAtoms, bodyAtoms)); + System.out.println(c.toString()); + } + + @Test + public void test_fillter() { + Variable x = Variable.create("X"), y1 = Variable.create("y1"), y2 = Variable.create("y2"); + AtomicConcept A = AtomicConcept.create("A"); + AtomicConcept B = AtomicConcept.create("B"); + AtomicRole r = AtomicRole.create("r"); + Atom[] bodyAtoms = new Atom[] { + Atom.create(A, x), + Atom.create(r, y1, x), + Atom.create(r, y2, x), + Atom.create(B, y1), + 
Atom.create(B, y2) + }; + + Atom[] headAtoms = new Atom[] { + Atom.create(Equality.INSTANCE, y1, y2) + }; + + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + OWLOntology emptyOntology = null; + try { + emptyOntology = m.createOntology(); + } catch (Exception e) { + e.printStackTrace(); + fail("failed to create a new ontology"); + } + Clause c = new Clause(Clausifier.getInstance(emptyOntology), DLClause.create(headAtoms, bodyAtoms)); + System.out.println(c.toString()); + } + + @Test + public void test_negFillter() { + Variable x = Variable.create("X"), y1 = Variable.create("y1"), y2 = Variable.create("y2"); + AtomicConcept A = AtomicConcept.create("A"); + AtomicConcept B = AtomicConcept.create("B"); + AtomicRole r = AtomicRole.create("r"); + Atom[] bodyAtoms = new Atom[] { + Atom.create(A, x), + Atom.create(r, y1, x), + Atom.create(r, y2, x) + }; + + Atom[] headAtoms = new Atom[] { + Atom.create(Equality.INSTANCE, y1, y2), + Atom.create(B, y1), + Atom.create(B, y2) + }; + + OWLOntologyManager m = OWLManager.createOWLOntologyManager(); + OWLOntology emptyOntology = null; + try { + emptyOntology = m.createOntology(); + } catch (Exception e) { + e.printStackTrace(); + fail("failed to create a new ontology"); + } + Clause c = new Clause(Clausifier.getInstance(emptyOntology), DLClause.create(headAtoms, bodyAtoms)); + System.out.println(c.toString()); + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/query/CheckAnswers.java b/src/test/java/uk/ac/ox/cs/pagoda/query/CheckAnswers.java new file mode 100644 index 0000000..71730c3 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/query/CheckAnswers.java @@ -0,0 +1,51 @@ +package uk.ac.ox.cs.pagoda.query; + +import com.google.gson.Gson; +import com.google.gson.reflect.TypeToken; +import org.testng.Assert; + +import java.io.BufferedReader; +import java.io.IOException; +import java.lang.reflect.Type; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Set; + +/** + * It provides 
auxiliary methods for checking answers. + */ +public class CheckAnswers { + + private CheckAnswers() { + } + + public static void assertSameAnswers(Path computedAnswersFile, Path givenAnswersFile) throws IOException { + BufferedReader computedReader = Files.newBufferedReader(computedAnswersFile); + BufferedReader givenReader = Files.newBufferedReader(givenAnswersFile); + + Gson gson = QueryRecord.GsonCreator.getInstance(); + + Type cqType = new TypeToken<Set<QueryRecord>>() {}.getType(); + Set<QueryRecord> computedAnswersRecords = gson.fromJson(computedReader, cqType); + Set<QueryRecord> givenAnswersRecords = gson.fromJson(givenReader, cqType); + + for(QueryRecord computedAnswersRecord : computedAnswersRecords) { + Set<AnswerTuple> givenAnswers = null; + for(QueryRecord givenAnswersRecord : givenAnswersRecords) { + if(givenAnswersRecord.queryID == computedAnswersRecord.queryID) { + givenAnswers = givenAnswersRecord.soundAnswerTuples; + break; + } + } + + Assert.assertNotNull(givenAnswers, "Missing given answer for query no. " + computedAnswersRecord.queryID); + + Set<AnswerTuple> computedAnswers = computedAnswersRecord.soundAnswerTuples; + Assert.assertEquals(computedAnswers.size(), givenAnswers.size(), + "Different number of sound answers for query " + computedAnswersRecord.queryID + "!" 
+ + "Expected " + givenAnswers.size() + ", got " + computedAnswers.size()); + Assert.assertEquals(computedAnswers, givenAnswers, + "Different sound answers for query " + computedAnswersRecord.queryID + "!"); + } + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/rules/approximators/TestSkolemTermsManager.java b/src/test/java/uk/ac/ox/cs/pagoda/rules/approximators/TestSkolemTermsManager.java new file mode 100644 index 0000000..6bd8f05 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/rules/approximators/TestSkolemTermsManager.java @@ -0,0 +1,14 @@ +package uk.ac.ox.cs.pagoda.rules.approximators; + +import org.testng.annotations.Test; + +/** + * Tests for the class SkolemTermsManager + */ +public class TestSkolemTermsManager { + + @Test + public void test() { + // TODO some test + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/summary/SummaryTester.java b/src/test/java/uk/ac/ox/cs/pagoda/summary/SummaryTester.java new file mode 100644 index 0000000..60c8ed4 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/summary/SummaryTester.java @@ -0,0 +1,140 @@ +package uk.ac.ox.cs.pagoda.summary; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.Scanner; + +import org.semanticweb.HermiT.Reasoner; +import org.semanticweb.owlapi.model.AxiomType; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLClassExpression; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyStorageException; +import org.semanticweb.owlapi.model.parameters.Imports; + +import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; +import uk.ac.ox.cs.pagoda.owl.OWLHelper; +import uk.ac.ox.cs.pagoda.owl.QueryRoller; +import uk.ac.ox.cs.pagoda.query.QueryManager; +import uk.ac.ox.cs.pagoda.query.QueryRecord; +import 
uk.ac.ox.cs.pagoda.summary.Summary; + +public class SummaryTester { + + static String FILE_BREAK = System.getProperty("file.separator"); + static String LINE_BREAK = System.getProperty("line.separator"); + + public static void main(String[] args) throws Exception { +// String arg = "ontologies/claros/all-in-one-manually.owl"; +// String arg = "ontologies/claros/Claros.owl ontologies/claros/data"; + String arg = "../uobmGenerator/univ-bench-dl.owl " + + "../uobmGenerator/uobm1 " + //"a " + + "ontologies/uobm/queries/uobm_standard_less.sparql"; + + testSummarisedUpperBound(arg.split("\\ ")); + } + + /** + * args[0] ontology file location + * args[1] data directory + * args[2] sparql query file location + * + * @param args + * @throws OWLOntologyCreationException + * @throws FileNotFoundException + * @throws OWLOntologyStorageException + */ + public static void testSummarisedUpperBound(String[] args) throws OWLOntologyCreationException, FileNotFoundException, OWLOntologyStorageException { + OWLOntology onto = OWLHelper.loadOntology(args[0]); + try { + onto = OWLHelper.getImportedOntology(onto, args[1]); + } catch (IOException e) { + e.printStackTrace(); + } + + Summary sum = testSummary(onto); + System.out.println("Summarisation Done."); + + System.out.println(args[2]); + Scanner scanner = new Scanner(new File(args[2])); + OWLOntology summary = sum.getSummary(); + OWLDataFactory factory = summary.getOWLOntologyManager().getOWLDataFactory(); + QueryRoller r = new QueryRoller(factory); + OWLClassExpression summarisedQueryExp; + Reasoner reasoner = new Reasoner(summary); + QueryManager queryManager = new QueryManager(); + int upperBoundCounter, queryID = 0; + StringBuilder queryText = new StringBuilder(); + String[] vars; + + for (String line; ; ) { + queryText.setLength(0); + while (scanner.hasNextLine() && (line = scanner.nextLine()) != null && !line.startsWith("^[query")); + if (!scanner.hasNextLine()) break; + + while (scanner.hasNextLine() && (line = 
scanner.nextLine()) != null && !line.isEmpty()) + queryText.append(line).append(LINE_BREAK); + if (!scanner.hasNextLine()) break; + + System.out.println("------------ starting computing for Query " + ++queryID + "------------"); + + System.out.println(queryText); + + QueryRecord record = queryManager.create(queryText.toString(), queryID); + vars = record.getAnswerVariables(); + if (vars.length > 1) { + System.out.println("The query cannot be processed by HermiT ... More than one answer variable"); + continue; + } + + summarisedQueryExp = r.rollUp(DLClauseHelper.getQuery(sum.getSummary(record), null), vars[0]); + + upperBoundCounter = 0; + for (String representative: sum.getRepresentatives()) + if (reasoner.isEntailed(factory.getOWLClassAssertionAxiom(summarisedQueryExp, factory.getOWLNamedIndividual(IRI.create(representative))))) { + upperBoundCounter += sum.getGroup(representative).size(); + } + + System.out.println("There are " + upperBoundCounter + " individual(s) in the upper bound computed by summary."); + } + scanner.close(); + } + + public static Summary testSummary(OWLOntology ontology) throws OWLOntologyCreationException, FileNotFoundException { + Summary sum = new Summary(ontology); + + System.out.println("original ontology data: "); + outputStatistics(ontology); + + OWLOntology summary = sum.getSummary(); + + System.out.println("summarised ontology data: "); + outputStatistics(summary); + + try { + FileOutputStream out = new FileOutputStream("summary.owl"); + summary.getOWLOntologyManager().saveOntology(summary, out); + out.close(); + } catch (OWLOntologyStorageException e) { + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + return sum; + } + + private static void outputStatistics(OWLOntology onto) { + System.out.println("TBox: " + onto.getTBoxAxioms(Imports.INCLUDED).size() + + "\tRBox: " + onto.getRBoxAxioms(Imports.INCLUDED).size() + + "\tABox: " + 
onto.getABoxAxioms(Imports.INCLUDED).size()); + System.out.println("Class Assertions: " + onto.getAxiomCount(AxiomType.CLASS_ASSERTION, Imports.INCLUDED) + + "\tObject Property Assertions: " + onto.getAxiomCount(AxiomType.OBJECT_PROPERTY_ASSERTION, Imports.INCLUDED)); + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/tester/OWLTester.java b/src/test/java/uk/ac/ox/cs/pagoda/tester/OWLTester.java new file mode 100644 index 0000000..25162c8 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/tester/OWLTester.java @@ -0,0 +1,30 @@ +package uk.ac.ox.cs.pagoda.tester; + +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.parameters.Imports; + +import uk.ac.ox.cs.pagoda.owl.OWLHelper; + +public class OWLTester { + + public static void main(String[] args) throws OWLOntologyCreationException { +// OWLOntology onto = OWLHelper.loadOntology("dbpedia_imported.owl"); + OWLOntology onto = OWLHelper.loadOntology("reactome_imported.owl"); + OWLOntologyManager manager = onto.getOWLOntologyManager(); +// OWLOntology data = manager.loadOntology(IRI.create("file:/media/krr-nas-share/Yujiao/ontologies/bio2rdf/reactome/graph\ sampling/sample_1_new.ttl")); +// System.out.println("data: " + data.getAxiomCount() + " " + data.getABoxAxioms(true).size()); + for (OWLOntology t: manager.getOntologies()) { + System.out.println(t.getOntologyID()); + System.out.println(t.getAxiomCount() + " " + onto.getABoxAxioms(Imports.INCLUDED).size()); + } + System.out.println("In closure: " + onto.getImportsClosure().size()); + for (OWLOntology t: onto.getImportsClosure()) + System.out.println(t.getOntologyID()); + + System.out.println(onto.getAxiomCount() + " " + onto.getABoxAxioms(Imports.INCLUDED).size()); + } + +} + diff --git a/src/test/java/uk/ac/ox/cs/pagoda/tester/PagodaTester.java 
b/src/test/java/uk/ac/ox/cs/pagoda/tester/PagodaTester.java new file mode 100644 index 0000000..d741d3b --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/tester/PagodaTester.java @@ -0,0 +1,318 @@ +package uk.ac.ox.cs.pagoda.tester; + +import uk.ac.ox.cs.pagoda.query.AnswerTuple; +import uk.ac.ox.cs.pagoda.query.AnswerTuples; +import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; +import uk.ac.ox.cs.pagoda.util.PagodaProperties; +import uk.ac.ox.cs.pagoda.util.Timer; +import uk.ac.ox.cs.pagoda.util.Utility; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.Scanner; + +@Deprecated +public class PagodaTester { + + // public static final String onto_dir = "/media/RDFData/yzhou/"; +// public static final String onto_dir = "/users/yzhou/ontologies/"; +// public static final String onto_dir = "/home/scratch/yzhou/ontologies/"; + public static final String onto_dir = "/home/alessandro/Big_files/Ontologies/"; + + public static final String fly = onto_dir + "fly/fly_anatomy_XP_with_GJ_FC_individuals.owl"; + public static final String fly_query = onto_dir + "fly/queries/fly.sparql"; + + public static final String test_tbox = onto_dir + "smallExampleFromAna/dummy.owl"; + public static final String test_abox = onto_dir + "smallExampleFromAna/initialABox.ttl"; + public static final String test_query = onto_dir + "smallExampleFromAna/queries.dlog"; + + public static final int lubm_number = 1; + public static final String lubm_tbox = onto_dir + "lubm/univ-bench.owl"; + public static final String lubm_abox = onto_dir + "lubm/data/lubm" + lubm_number + ".ttl"; + public static final String lubm_abox_copy = onto_dir + "lubm/data/lubm" + lubm_number + " (copy).ttl"; + public static final String lubm_query = onto_dir + "lubm/queries/test.sparql"; + public static final String lubm_query6 = onto_dir + "lubm/queries/test_q6.sparql"; + public static final String lubm_query20 = onto_dir + "lubm/queries/test_q16.sparql"; + + public 
static final int uobm_number = 1; + public static final String uobm_tbox = onto_dir + "uobm/univ-bench-dl.owl"; + public static final String uobm_abox = onto_dir + "uobm/data/uobm" + uobm_number + ".ttl"; + public static final String uobm_query = onto_dir + "uobm/queries/test.sparql"; + public static final String uobm_query_temp = onto_dir + "uobm/queries/temp.sparql"; + public static final String uobm_query2 = onto_dir + "uobm/queries/standard_q2.sparql"; + public static final String uobm_query9 = onto_dir + "uobm/queries/standard_q9.sparql"; + public static final String uobm_query11 = onto_dir + "uobm/queries/standard_q11.sparql"; + public static final String uobm_query12 = onto_dir + "uobm/queries/standard_q12.sparql"; + public static final String uobm_query14 = onto_dir + "uobm/queries/standard_q14.sparql"; + public static final String uobm_query15 = onto_dir + "uobm/queries/standard_q15.sparql"; + public static final String uobm_query_multi = onto_dir + "uobm/queries/standard_multi.sparql"; + public static final String uobm_generated_query1 = onto_dir + "uobm/queries/generated_q1.sparql"; + public static final String uobm_query_group3 = onto_dir + "uobm/queries/standard_group3.sparql"; + + public static final String npd_tbox = onto_dir + "npd/npd-all-minus-datatype.owl"; + // "npd/npd-all.owl"; + // "npd-all-minus-datatype.owl"; + public static final String npd_abox = onto_dir + "npd/data/npd-data-dump-minus-datatype-new.ttl"; + // "npd/data/npd-data-dump-processed.ttl"; + // "npd-data-dump-minus-datatype-old.ttl"; + public static final String npd_query = onto_dir + "npd/queries/atomic.sparql"; + + public static final String npd_bench_tbox = onto_dir + "npd-benchmark/npd-v2-ql_a.owl"; + // npd-all-minus-datatype.owl"; + public static final String npd_bench_abox = onto_dir + "npd-benchmark/npd-v2-ql_a.ttl"; + // npd-data-dump-minus-datatype-old.ttl"; + public static final String npd_bench_query = onto_dir + "npd-benchmark/queries/all.sparql"; + + public static 
final String dbpedia_tbox = onto_dir + "dbpedia/integratedOntology-all-in-one-minus-datatype.owl"; + public static final String dbpedia_abox = onto_dir + "dbpedia/data/dbpedia-minus-datatype-new.ttl"; + public static final String dbpedia_query = onto_dir + "dbpedia/queries/atomic_ground.sparql"; + public static final String dbpedia_query274 = onto_dir + "dbpedia/atomic_q274.sparql"; + + public static final String dbpedia_latest_tbox = onto_dir + "dbpedia/dbpedia_2014.owl"; + public static final String travel_tbox = onto_dir + "dbpedia/travel.owl"; + public static final String dbpedia_tbox_simple = onto_dir + "dbpedia/dbpedia_simple.owl"; + + public static final String bioModels_tbox = onto_dir + "biomodels/biomodels-21.owl"; + public static final String bioModels_abox = onto_dir + "biomodels/data_processed_1.ttl"; + public static final String bioModels_queries = onto_dir + "biomodels/queries/queries.sparql"; + + public static final String chembl_tbox = onto_dir + "bio2rdf/chembl/cco-processed-noDPR-noDPD.ttl"; + public static final String chembl_abox = onto_dir + "bio2rdf/chembl/graph sampling old/sample_100.nt"; + public static final String chembl_queries = onto_dir + "bio2rdf/chembl/queries/problematic.sparql"; + //"bio2rdf/chembl/queries/atomic_one_filtered.sparql"; // + + public static final String reactome_tbox = onto_dir + "bio2rdf/reactome/biopax-level3-processed.owl"; + public static final String reactome_abox = onto_dir + "bio2rdf/reactome/graph sampling old/sample.ttl"; + //data/data.ttl"; //graph sampling old/reactome_sample_10.ttl"; // + public static final String reactome_queries = onto_dir + "bio2rdf/reactome/queries/atomic.sparql"; + + public static final String uniprot_tbox = onto_dir + "bio2rdf/uniprot/core-processed.owl"; + public static final String uniprot_abox = onto_dir + "bio2rdf/uniprot/graph sampling/sample_1.nt"; + public static final String uniprot_queries = onto_dir + "bio2rdf/uniprot/queries/atomic_one.sparql"; + + public static final 
String atlas_tbox = onto_dir + "bio2rdf/atlas/gxaterms.owl"; + public static final String atlas_abox = onto_dir + "bio2rdf/atlas/graph sampling/sample_1.nt"; + public static final String atlas_queries = onto_dir + "bio2rdf/atlas/queries/atomic_one.sparql"; + QueryReasoner pagoda; + + // private void printPredicatesWithGap() { +// for (String p: ((MyQueryReasoner) pagoda).getPredicatesWithGap()) { +// System.out.println(p); +// } +// } + Timer timer = new Timer(); + + public PagodaTester(QueryReasoner reasoner) { + pagoda = reasoner; + } + + public static void main(String... args) { + if(args.length == 0) { +// args = new String[] {test_tbox, test_abox, test_query}; +// args = new String[] {lubm_tbox, lubm_abox, lubm_query}; +// args = new String[] {uobm_tbox, uobm_abox, uobm_query}; +// args = new String[] {fly, "null", fly_query}; +// args = new String[] {dbpedia_tbox, dbpedia_abox, dbpedia_query}; +// args = new String[] {travel_tbox, null, dbpedia_query274}; + args = new String[]{fly, fly_query}; +// args = new String[] {npd_tbox, npd_abox, npd_query}; +// args = new String[] {npd_bench_tbox, npd_bench_abox, npd_bench_query}; +// args = new String[] {"../SemFacet/WebContent/WEB-INF/data/dbpedia.owl", "../SemFacet/WebContent/WEB-INF/data/dbpediaA.nt", null}; +// args = new String[] {"../core/WebContent/WEB-INF/data/fly.owl", "../core/WebContent/WEB-INF/data/fly-data.nt", null}; +// args = new String[] {"data/lubm/univ-bench.owl", "data/lubm/lubm1.ttl", "data/lubm/lubm.sparql", "lubm.ans"}; +// args = new String[] {"data/uobm/univ-bench-dl.owl", "data/uobm/uobm1.ttl", "data/uobm/uobm.sparql", "uobm.ans"}; +// args = new String[] {"data/fly/fly_anatomy_XP_with_GJ_FC_individuals.owl", "data/fly/fly.sparql", "fly.ans"}; +// args = new String[] {bioModels_tbox, bioModels_abox, bioModels_queries}; +// args = new String[] {chembl_tbox, chembl_abox, chembl_queries}; +// args = new String[] {reactome_tbox, reactome_abox, reactome_queries}; +// args = new String[] 
{reactome_tbox, "/users/yzhou/temp/reactome_debug.ttl", onto_dir +"bio2rdf/reactome/queries/atomic_one_q65.sparql"}; +// args = new String[] {uniprot_tbox.replace(".owl", "-noDis.owl"), "/users/yzhou/temp/uniprot_debug/sample_1_string.nt", uniprot_queries}; +// args = new String[] {uniprot_tbox.replace(".owl", "-noDis.owl"), uniprot_abox, uniprot_queries}; +// args = new String[] {atlas_tbox, atlas_abox, atlas_queries}; +// args = new String[] {onto_dir + "test/unsatisfiable.owl", null, onto_dir + "test/unsatisfiable_queries.sparql"}; +// args = new String[] {onto_dir + "test/jair-example.owl", null, onto_dir + "test/jair-example_query.sparql"}; +// args[2] = args[2].replace(".sparql", "_all_pagoda.sparql"); +// args[2] = args[2].replace(".sparql", "_pellet.sparql"); + } + + PagodaProperties properties = new PagodaProperties("config/uobm.properties"); + + int index = 0; + if(args.length > index) properties.setOntologyPath(args[index++]); + if(args.length > index && (args[index].endsWith(".ttl") || args[index].endsWith(".nt"))) + properties.setDataPath(args[index++]); + if(args.length > index && args[index].endsWith(".sparql")) properties.setQueryPath(args[index++]); + if(args.length > index && !args[index].startsWith("-")) properties.setAnswerPath(args[index++]); + if(args.length > index) properties.setToClassify(Boolean.parseBoolean(args[index++].substring(1))); + if(args.length > index) properties.setToCallHermiT(Boolean.parseBoolean(args[index++].substring(1))); + + Utility.logInfo("Ontology file: " + properties.getOntologyPath()); + Utility.logInfo("Data files: " + properties.getDataPath()); + Utility.logInfo("Query files: " + properties.getQueryPath()); + Utility.logInfo("Answer file: " + properties.getAnswerPath()); + + QueryReasoner pagoda = null; + + try { + Timer t = new Timer(); + pagoda = QueryReasoner.getInstance(properties); + if (pagoda == null) return; + + Utility.logInfo("Preprocessing Done in " + t.duration() + " seconds."); + + if 
(properties.getQueryPath() != null) + for (String queryFile: properties.getQueryPath().split(";")) + pagoda.evaluate(pagoda.getQueryManager().collectQueryRecords(queryFile)); + + if(properties.getShellMode()) + try { + evaluateConsoleQuery(pagoda); + } catch(IOException e) { + e.printStackTrace(); + } + } finally { + if (pagoda != null) pagoda.dispose(); + } + +// Utility.closeCurrentOut(); + + if(properties.getShellMode()) System.exit(0); + } + + private static void evaluateConsoleQuery(QueryReasoner pagoda) throws IOException { + int ending = (int) '$', symbol; + while(true) { + Utility.logInfo("Input your query ending with $"); + StringBuilder queryBuilder = new StringBuilder(); + while((symbol = System.in.read()) != ending) { + queryBuilder.append((char) symbol); + } + System.in.read(); + if(queryBuilder.length() == 0) return; + pagoda.evaluate_shell(queryBuilder.toString()); + } + } + + void testReactomeQueries() { + evaluate("select ?x where { ?x . }"); + evaluate("select ?y ?z where { ?y ?z . }"); + evaluate("select ?y where { ?y . }", true); + + } + + void testSemFacetQueries() { +// try { +// BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream("query.line"))); +// for (String line; (line = reader.readLine()) != null && !line.isEmpty(); ) +// evaluate(line, true); +// reader.close(); +// } catch (FileNotFoundException e) { +// e.printStackTrace(); +// } catch (IOException e) { +// e.printStackTrace(); +// } + evaluate("select ?x ?z where { ?x ?z }", true); + evaluate("select distinct ?y where { ?x ?y ?z }", true); + evaluate("select distinct ?z where { ?x ?z }", true); + evaluate("select ?y ?z where { ?y ?z .}", true); + } + + void testISGQueries() { + evaluate("select ?z where { ?z .}", false); + evaluate("select ?x where {?x .}", false); + } + + void testSomeTravelQueries() { + evaluate("select ?y ?z where { ?y ?z. }", true); + evaluate("select ?x where {?x . }"); + evaluate("select ?x where {?x . 
}"); + } + + void testSomeFlyQueries() { + evaluate("select ?x where { ?x . }", false); + + evaluate("select DISTINCT ?z where { ?x ?any . ?any ?z . ?x . } ", true); + + evaluate("Select ?x where { ?x " + + ". ?x " + + " ?any . ?any " + + " " + + " . }", true); + + evaluate("Select ?x where {?x " + + " . ?x " + + " ?any . ?any " + + " " + + " . }", true); + +// evaluate("Select ?x where { " +// + "?x . " +// + "?x ?any . " +// + "?any . }", true); + + evaluate("select DISTINCT ?z where { ?x ?any . " + + "?any ?z . " + + "?x . } ", true); + + evaluate("Select * where {" + + " . " + + " ?z }", true); + + evaluate("SELECT DISTINCT ?x ?z WHERE {?x ?z}", true); + evaluate("SELECT DISTINCT ?x ?z WHERE {?x ?z}", true); + + evaluate("select DISTINCT ?y where { ?x ?y ?z . " + + "?x }", true); + + evaluateQueriesFromFile("/users/yzhou/Downloads/logs(1).log"); + evaluateQueriesFromFile("/users/yzhou/Downloads/logs.log"); + + evaluate("SELECT DISTINCT ?x ?z WHERE {?x ?z}", true); + evaluate("SELECT DISTINCT ?x ?z WHERE {?x ?z}", true); + + evaluate("select ?x ?z where { ?x ?z } ", true); + evaluate("select ?x ?z where { ?x ?z } ", true); + } + + public void evaluateQueriesFromFile(String fileName) { + Scanner scanner = null; + try { + scanner = new Scanner(new File(fileName)); + String line; + while(scanner.hasNextLine()) { + line = scanner.nextLine(); + if(line.startsWith("select")) + evaluate(line, true); + } + } catch(FileNotFoundException e) { + e.printStackTrace(); + } finally { + if(scanner != null) + scanner.close(); + } + } + + private void evaluate(String query) { + evaluate(query, false); + } + + private void evaluate(String query, boolean tag) { + timer.reset(); + AnswerTuples tuples = pagoda.evaluate(query, tag); + int arity = tuples.getArity(); + int count = 0; + for(AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) { + tuple = tuples.getTuple(); + for(int i = 0; i < arity; ++i) + tuple.getGroundTerm(i).toString(); +// 
System.out.print(tuple.getGroundTerm(i).toString() + "\t"); +// System.out.println(); + ++count; + } + tuples.dispose(); + Utility.logInfo("The number of answers for this SemFacet query: " + count); + Utility.logInfo("Total time for this SemFacet query: " + timer.duration()); + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/tester/Statistics.java b/src/test/java/uk/ac/ox/cs/pagoda/tester/Statistics.java new file mode 100644 index 0000000..13d7f90 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/tester/Statistics.java @@ -0,0 +1,60 @@ +package uk.ac.ox.cs.pagoda.tester; + +import java.io.File; +import java.io.FileNotFoundException; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.Scanner; + +@Deprecated +public class Statistics { + + double satCheckTime; + double preprocessTime; + LinkedList number = new LinkedList(); + LinkedList time = new LinkedList(); + + public Statistics(String file) { + Scanner scanner = null; + try { + scanner = new Scanner(new File(file)); + for (String line; scanner.hasNextLine(); ) { + line = scanner.nextLine(); + if (line.contains("time for satisfiability checking")) + satCheckTime = Double.parseDouble(line.substring(line.indexOf(": ") + 2)); + else if (line.contains("Preprocessing Done in")) + preprocessTime = Double.parseDouble(line.substring(line.indexOf("in ") + 3, line.indexOf(" second"))); + else if (line.contains("The number of answer tuples:")) + number.add(Integer.parseInt(line.substring(line.indexOf(": ") + 2))); + else if (line.contains("Total time to answer this query:")) + time.add(Double.parseDouble(line.substring(line.indexOf(": ") + 2))); + } + } catch (FileNotFoundException e) { + e.printStackTrace(); + } finally { + if (scanner != null) + scanner.close(); + } + } + + public String diff(String other) { + return diff(new Statistics(other)); + } + + public String diff(Statistics other) { + if (other.number.size() != number.size()) + return "The number of query is different! 
" + this.number.size() + " v.s. " + other.number.size(); + int i = 0; + Iterator iter1 = number.iterator(), iter2 = other.number.iterator(); + StringBuilder diff = new StringBuilder(); + int a, b; + while (iter1.hasNext()) { + ++i; + if ((a = iter1.next()) != (b = iter2.next())) { + diff.append("Query ").append(i).append(": ").append(a).append(", reference ").append(b).append("\n"); + } + } + return diff.toString(); + } + +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/util/SimpleProgressBarTester.java b/src/test/java/uk/ac/ox/cs/pagoda/util/SimpleProgressBarTester.java new file mode 100644 index 0000000..3de30e4 --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/util/SimpleProgressBarTester.java @@ -0,0 +1,16 @@ +package uk.ac.ox.cs.pagoda.util; + +import org.testng.annotations.Test; + +public class SimpleProgressBarTester { + + @Test + public void test() throws InterruptedException { + SimpleProgressBar simpleProgressBar = new SimpleProgressBar("TestBar", 1000); + for(int i = 0; i < 1000; i++) { + simpleProgressBar.update(i); + Thread.sleep(10); + } + simpleProgressBar.dispose(); + } +} diff --git a/src/test/java/uk/ac/ox/cs/pagoda/util/TestUtil.java b/src/test/java/uk/ac/ox/cs/pagoda/util/TestUtil.java new file mode 100644 index 0000000..c7f024a --- /dev/null +++ b/src/test/java/uk/ac/ox/cs/pagoda/util/TestUtil.java @@ -0,0 +1,97 @@ +package uk.ac.ox.cs.pagoda.util; + +import org.apache.log4j.Appender; +import org.apache.log4j.FileAppender; +import org.apache.log4j.Logger; +import org.semanticweb.owlapi.model.IRI; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Enumeration; +import java.util.Properties; + +import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; + +/** + * A collection of utility methods for testing. 
+ */ +public class TestUtil { + + public static final String CONFIG_FILE = "test.properties"; + private static final Logger LOGGER = Logger.getLogger("Tester"); + private static boolean isConfigLoaded = false; + private static Properties config; + + public static Properties getConfig() { + if(!isConfigLoaded) { + try(InputStream in = TestUtil.class.getClassLoader().getResourceAsStream(CONFIG_FILE)) { + config = new java.util.Properties(); + config.load(in); + in.close(); + isConfigLoaded = true; + } catch (IOException e) { + e.printStackTrace(); + } + } + return config; + } + + public static String combinePaths(String path1, String path2) { + File file1 = new File(path1); + File file2 = new File(file1, path2); + return file2.getPath(); + } + + public static void copyFile(String src, String dst) throws IOException { + Files.copy(Paths.get(src), Paths.get(dst), REPLACE_EXISTING); + } + + /** + * Get the log file, which is assumed unique. + * */ + public static String getLogFileName() { + Enumeration e = Logger.getRootLogger().getAllAppenders(); + while (e.hasMoreElements()){ + Appender app = (Appender)e.nextElement(); + if (app instanceof FileAppender){ + return ((FileAppender)app).getFile(); + } + } + return null; + } + + public static Path getAnswersFilePath(String name) { + URL givenAnswersURL = TestUtil.class.getClassLoader() + .getResource(name); + if(givenAnswersURL == null) throw new RuntimeException("Missing answers file:" + name); + return Paths.get(givenAnswersURL.getPath()); + } + + public static void logInfo(Object msg) { + LOGGER.info(msg); + } + + public static void logDebug(Object msg) { + LOGGER.debug(msg); + } + + public static void logError(Object msg) { + LOGGER.error(msg); + } + + public static void logError(Object msg, Throwable t) { + LOGGER.error(msg, t); + } + + public static final String NS = "http://example.org/test#%s"; + + public static IRI getEntityIRI(String name) { + return IRI.create(String.format(NS, name)); + } + +} -- cgit v1.2.3