author     RncLsn <rnc.lsn@gmail.com>    2015-05-19 13:35:52 +0100
committer  RncLsn <rnc.lsn@gmail.com>    2015-05-19 13:35:52 +0100
commit     5d54af2638a53721b414a41356a93686a9616272 (patch)
tree       e28c64b1887e7e964661d12d96df5b09abd4d9ee
parent     c7dbc7c61c7094ea4ec49bd630023f23b92fd9d1 (diff)
download   ACQuA-5d54af2638a53721b414a41356a93686a9616272.tar.gz
           ACQuA-5d54af2638a53721b414a41356a93686a9616272.zip
Backup before changes in MyQueryReasoner.
-rw-r--r--   .gitignore                                                    3
-rw-r--r--   src/uk/ac/ox/cs/pagoda/Pagoda.java                           64
-rw-r--r--   src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java        176
-rw-r--r--   src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java          174
-rw-r--r--   test/resources/HeavyTests.xml                                17
-rw-r--r--   test/resources/LightTests.xml                                17
-rw-r--r--   test/uk/ac/ox/cs/hermit/HermitQueryReasoner.java             48
-rw-r--r--   test/uk/ac/ox/cs/hermit/JAIR_HermiT.java                     20
-rw-r--r--   test/uk/ac/ox/cs/pagoda/global_tests/CheckAnswers.java (renamed from test/uk/ac/ox/cs/pagoda/global_tests/CheckAnswersOverDataset.java)   23
-rw-r--r--   test/uk/ac/ox/cs/pagoda/global_tests/JAIR_PAGOdA.java        48
-rw-r--r--   test/uk/ac/ox/cs/pagoda/global_tests/JAIR_Scalability.java   52
-rw-r--r--   test/uk/ac/ox/cs/pagoda/global_tests/LightEvaluation.java     5
-rw-r--r--   test/uk/ac/ox/cs/pagoda/global_tests/TestPagodaLUBM.java     35
-rw-r--r--   test/uk/ac/ox/cs/pagoda/global_tests/TestPagodaUOBM.java     45
-rw-r--r--   test/uk/ac/ox/cs/pagoda/tester/PagodaTester.java            310
-rw-r--r--   test/uk/ac/ox/cs/pagoda/tester/Statistics.java                1
-rw-r--r--   test/uk/ac/ox/cs/pagoda/util/TestUtil.java                    3
17 files changed, 679 insertions, 362 deletions
diff --git a/.gitignore b/.gitignore
index 0d42d8b..55844bd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,4 +3,5 @@
3/backup/ 3/backup/
4/tmp/ 4/tmp/
5/testcase/ 5/testcase/
6/log4j.log 6/*.log
7/*.pdf \ No newline at end of file
diff --git a/src/uk/ac/ox/cs/pagoda/Pagoda.java b/src/uk/ac/ox/cs/pagoda/Pagoda.java
index 3263c03..4ad7678 100644
--- a/src/uk/ac/ox/cs/pagoda/Pagoda.java
+++ b/src/uk/ac/ox/cs/pagoda/Pagoda.java
@@ -9,7 +9,7 @@ import uk.ac.ox.cs.pagoda.util.Utility;
9import java.nio.file.Path; 9import java.nio.file.Path;
10 10
11/** 11/**
12 * The main class 12 * Executable command line user interface.
13 */ 13 */
14public class Pagoda implements Runnable { 14public class Pagoda implements Runnable {
15 15
@@ -19,31 +19,62 @@ public class Pagoda implements Runnable {
19 private static final String OPTION_ANSWER = "a"; 19 private static final String OPTION_ANSWER = "a";
20 private static final String OPTION_CLASSIFY = "c"; 20 private static final String OPTION_CLASSIFY = "c";
21 private static final String OPTION_HERMIT = "f"; 21 private static final String OPTION_HERMIT = "f";
22 private final Properties properties;
23
24 /**
25 * Do not use it
26 * */
27 private Pagoda() {
28 properties = new Properties();
29 }
22 30
23 public static void main(String... args) { 31 public static void main(String... args) {
24 32
25 Options options = new Options(); 33 Options options = new Options();
26 options.addOption(Option.builder(OPTION_ONTOLOGY).argName(OPTION_ONTOLOGY).required().hasArg().desc("The ontology path").build()); 34 options.addOption(Option.builder(OPTION_ONTOLOGY)
35 .argName(OPTION_ONTOLOGY)
36 .required()
37 .hasArg()
38 .desc("The ontology path")
39 .build());
27 options.addOption(Option.builder(OPTION_DATA).argName(OPTION_DATA).hasArg().desc("The data path").build()); 40 options.addOption(Option.builder(OPTION_DATA).argName(OPTION_DATA).hasArg().desc("The data path").build());
28 options.addOption(Option.builder(OPTION_QUERY).argName(OPTION_QUERY).required().hasArg().desc("The query path").build()); 41 options.addOption(Option.builder(OPTION_QUERY)
29 options.addOption(Option.builder(OPTION_ANSWER).argName(OPTION_ANSWER).hasArg().desc("The answer path").build()); 42 .argName(OPTION_QUERY)
30 options.addOption(Option.builder(OPTION_CLASSIFY).argName(OPTION_CLASSIFY).desc("Tell whether to classify").type(Boolean.class).build()); 43 .required()
31 options.addOption(Option.builder(OPTION_HERMIT).argName(OPTION_HERMIT).desc("Tell whether to call Hermit").type(Boolean.class).build()); 44 .hasArg()
45 .desc("The query path")
46 .build());
47 options.addOption(Option.builder(OPTION_ANSWER)
48 .argName(OPTION_ANSWER)
49 .hasArg()
50 .desc("The answer path")
51 .build());
52 options.addOption(Option.builder(OPTION_CLASSIFY)
53 .argName(OPTION_CLASSIFY)
54 .desc("Tell whether to classify")
55 .type(Boolean.class)
56 .build());
57 options.addOption(Option.builder(OPTION_HERMIT)
58 .argName(OPTION_HERMIT)
59 .desc("Tell whether to call Hermit")
60 .type(Boolean.class)
61 .build());
32 62
33 CommandLineParser parser = new DefaultParser(); 63 CommandLineParser parser = new DefaultParser();
34 try { 64 try {
35 CommandLine cmd = parser.parse( options, args ); 65 CommandLine cmd = parser.parse(options, args);
36 PagodaBuilder pagodaBuilder = Pagoda.builder() 66 PagodaBuilder pagodaBuilder = Pagoda.builder()
37 .ontology(cmd.getOptionValue(OPTION_ONTOLOGY)) 67 .ontology(cmd.getOptionValue(OPTION_ONTOLOGY))
38 .query(cmd.getOptionValue(OPTION_QUERY)); 68 .query(cmd.getOptionValue(OPTION_QUERY));
39 if(cmd.hasOption(OPTION_DATA)) pagodaBuilder.data(cmd.getOptionValue(OPTION_DATA)); 69 if(cmd.hasOption(OPTION_DATA)) pagodaBuilder.data(cmd.getOptionValue(OPTION_DATA));
40 if(cmd.hasOption(OPTION_ANSWER)) pagodaBuilder.answer(cmd.getOptionValue(OPTION_ANSWER)); 70 if(cmd.hasOption(OPTION_ANSWER)) pagodaBuilder.answer(cmd.getOptionValue(OPTION_ANSWER));
41 if(cmd.hasOption(OPTION_CLASSIFY)) pagodaBuilder.classify(Boolean.parseBoolean(cmd.getOptionValue(OPTION_CLASSIFY))); 71 if(cmd.hasOption(OPTION_CLASSIFY))
42 if(cmd.hasOption(OPTION_HERMIT)) pagodaBuilder.hermit(Boolean.parseBoolean(cmd.getOptionValue(OPTION_HERMIT))); 72 pagodaBuilder.classify(Boolean.parseBoolean(cmd.getOptionValue(OPTION_CLASSIFY)));
73 if(cmd.hasOption(OPTION_HERMIT))
74 pagodaBuilder.hermit(Boolean.parseBoolean(cmd.getOptionValue(OPTION_HERMIT)));
43 75
44 pagodaBuilder.build().run(); 76 pagodaBuilder.build().run();
45 } 77 } catch(ParseException exp) {
46 catch( ParseException exp ) {
47 HelpFormatter formatter = new HelpFormatter(); 78 HelpFormatter formatter = new HelpFormatter();
48 formatter.printHelp("PAGOdA", options); 79 formatter.printHelp("PAGOdA", options);
49 Utility.logError("Parsing failed. Reason: " + exp.getMessage()); 80 Utility.logError("Parsing failed. Reason: " + exp.getMessage());
@@ -52,15 +83,6 @@ public class Pagoda implements Runnable {
52 } 83 }
53 84
54 /** 85 /**
55 * Do not use it
56 * */
57 private Pagoda() {
58 properties = new Properties();
59 }
60
61 private final Properties properties;
62
63 /**
64 * Get a builder. 86 * Get a builder.
65 * */ 87 * */
66 public static PagodaBuilder builder() { 88 public static PagodaBuilder builder() {
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
index 1f435b7..dfbcb4d 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
@@ -37,21 +37,19 @@ public class MyQueryReasoner extends QueryReasoner {
37 BasicQueryEngine limitedSkolemUpperStore; 37 BasicQueryEngine limitedSkolemUpperStore;
38// boolean[] namedIndividuals_lazyUpper; 38// boolean[] namedIndividuals_lazyUpper;
39 39
40 OWLOntology elho_ontology; 40 OWLOntology elho_ontology;
41 KarmaQueryEngine elLowerStore = null; 41 KarmaQueryEngine elLowerStore = null;
42 42
43 BasicQueryEngine trackingStore = null; 43 BasicQueryEngine trackingStore = null;
44// boolean[] namedIndividuals_tracking; 44// boolean[] namedIndividuals_tracking;
45
46 boolean equalityTag;
47 boolean multiStageTag;
48 TrackingRuleEncoder encoder; 45 TrackingRuleEncoder encoder;
49 Timer t = new Timer(); 46 private boolean equalityTag;
47 private boolean multiStageTag;
48 private Timer t = new Timer();
50 private Collection<String> predicatesWithGap = null; 49 private Collection<String> predicatesWithGap = null;
51 private Boolean satisfiable; 50 private SatisfiabilityStatus satisfiable;
52 private ConsistencyManager consistency = new ConsistencyManager(this); 51 private ConsistencyManager consistency = new ConsistencyManager(this);
53 private boolean useUpperStores = false; 52 private boolean useUpperStores = false;
54
55 public MyQueryReasoner() { 53 public MyQueryReasoner() {
56 setup(true, true); 54 setup(true, true);
57 } 55 }
@@ -59,7 +57,7 @@ public class MyQueryReasoner extends QueryReasoner {
59 public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) { 57 public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) {
60 setup(multiStageTag, considerEqualities); 58 setup(multiStageTag, considerEqualities);
61 } 59 }
62 60
63 private BasicQueryEngine getUpperStore(String name, boolean checkValidity) { 61 private BasicQueryEngine getUpperStore(String name, boolean checkValidity) {
64 if (multiStageTag) 62 if (multiStageTag)
65 return new MultiStageQueryEngine(name, checkValidity); 63 return new MultiStageQueryEngine(name, checkValidity);
@@ -69,7 +67,7 @@ public class MyQueryReasoner extends QueryReasoner {
69 } 67 }
70 68
71 public void setup(boolean multiStageTag, boolean considerEqualities) { 69 public void setup(boolean multiStageTag, boolean considerEqualities) {
72 satisfiable = null; 70 satisfiable = SatisfiabilityStatus.UNCHECKED;
73 this.multiStageTag = multiStageTag; 71 this.multiStageTag = multiStageTag;
74 this.equalityTag = considerEqualities; 72 this.equalityTag = considerEqualities;
75 73
@@ -90,7 +88,7 @@ public class MyQueryReasoner extends QueryReasoner {
90 88
91 @Override 89 @Override
92 public void loadOntology(OWLOntology o) { 90 public void loadOntology(OWLOntology o) {
93 if (!equalityTag) { 91 if(!equalityTag) {
94 EqualitiesEliminator eliminator = new EqualitiesEliminator(o); 92 EqualitiesEliminator eliminator = new EqualitiesEliminator(o);
95 o = eliminator.getOutputOntology(); 93 o = eliminator.getOutputOntology();
96 eliminator.save(); 94 eliminator.save();
@@ -103,9 +101,9 @@ public class MyQueryReasoner extends QueryReasoner {
103// program.getGeneral().save(); 101// program.getGeneral().save();
104 102
105 useUpperStores = multiStageTag && !program.getGeneral().isHorn(); 103 useUpperStores = multiStageTag && !program.getGeneral().isHorn();
106 if (useUpperStores) { 104 if(useUpperStores) {
107 lazyUpperStore = getUpperStore("lazy-upper-bound", true); // new MultiStageQueryEngine("lazy-upper-bound", true); // 105 lazyUpperStore = getUpperStore("lazy-upper-bound", true);
108 limitedSkolemUpperStore = getUpperStore("limited-skolem-upper-bound", true); 106 limitedSkolemUpperStore = getUpperStore("limited-skolem-upper-bound", true);
109 } 107 }
110 108
111 importData(program.getAdditionalDataFile()); 109 importData(program.getAdditionalDataFile());
@@ -113,11 +111,11 @@ public class MyQueryReasoner extends QueryReasoner {
113 elho_ontology = new ELHOProfile().getFragment(ontology); 111 elho_ontology = new ELHOProfile().getFragment(ontology);
114 elLowerStore.processOntology(elho_ontology); 112 elLowerStore.processOntology(elho_ontology);
115 } 113 }
116 114
117 public Collection<String> getPredicatesWithGap() { 115 public Collection<String> getPredicatesWithGap() {
118 return predicatesWithGap; 116 return predicatesWithGap;
119 } 117 }
120 118
121 @Override 119 @Override
122 public boolean preprocess() { 120 public boolean preprocess() {
123 t.reset(); 121 t.reset();
@@ -127,7 +125,7 @@ public class MyQueryReasoner extends QueryReasoner {
127 rlLowerStore.importRDFData(name, datafile); 125 rlLowerStore.importRDFData(name, datafile);
128 rlLowerStore.materialise("lower program", program.getLower().toString()); 126 rlLowerStore.materialise("lower program", program.getLower().toString());
129// program.getLower().save(); 127// program.getLower().save();
130 if (!consistency.checkRLLowerBound()) return false; 128 if(!consistency.checkRLLowerBound()) return false;
131 Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber()); 129 Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber());
132 130
133 String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); 131 String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology);
@@ -136,35 +134,35 @@ public class MyQueryReasoner extends QueryReasoner {
136 elLowerStore.materialise("saturate named individuals", originalMarkProgram); 134 elLowerStore.materialise("saturate named individuals", originalMarkProgram);
137 elLowerStore.materialise("lower program", program.getLower().toString()); 135 elLowerStore.materialise("lower program", program.getLower().toString());
138 elLowerStore.initialiseKarma(); 136 elLowerStore.initialiseKarma();
139 if (!consistency.checkELLowerBound()) return false; 137 if(!consistency.checkELLowerBound()) return false;
140 138
141 if (lazyUpperStore != null) { 139 if(lazyUpperStore != null) {
142 lazyUpperStore.importRDFData(name, datafile); 140 lazyUpperStore.importRDFData(name, datafile);
143 lazyUpperStore.materialise("saturate named individuals", originalMarkProgram); 141 lazyUpperStore.materialise("saturate named individuals", originalMarkProgram);
144 int tag = lazyUpperStore.materialiseRestrictedly(program, null); 142 int tag = lazyUpperStore.materialiseRestrictedly(program, null);
145 if (tag != 1) { 143 if(tag != 1) {
146 lazyUpperStore.dispose(); 144 lazyUpperStore.dispose();
147 lazyUpperStore = null; 145 lazyUpperStore = null;
148 } 146 }
149 if (tag == -1) return false; 147 if(tag == -1) return false;
150 } 148 }
151 if (consistency.checkUpper(lazyUpperStore)) { 149 if(consistency.checkUpper(lazyUpperStore)) {
152 satisfiable = true; 150 satisfiable = SatisfiabilityStatus.SATISFIABLE;
153 Utility.logInfo("time for satisfiability checking: " + t.duration()); 151 Utility.logInfo("time for satisfiability checking: " + t.duration());
154 } 152 }
155 153
156 if (limitedSkolemUpperStore != null) { 154 if(limitedSkolemUpperStore != null) {
157 limitedSkolemUpperStore.importRDFData(name, datafile); 155 limitedSkolemUpperStore.importRDFData(name, datafile);
158 limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram); 156 limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram);
159 int tag = limitedSkolemUpperStore.materialiseSkolemly(program, null); 157 int tag = limitedSkolemUpperStore.materialiseSkolemly(program, null);
160 if (tag != 1) { 158 if(tag != 1) {
161 limitedSkolemUpperStore.dispose(); 159 limitedSkolemUpperStore.dispose();
162 limitedSkolemUpperStore = null; 160 limitedSkolemUpperStore = null;
163 } 161 }
164 if (tag == -1) return false; 162 if(tag == -1) return false;
165 } 163 }
166 if (consistency.checkUpper(limitedSkolemUpperStore)) { 164 if(satisfiable == SatisfiabilityStatus.UNCHECKED && consistency.checkUpper(limitedSkolemUpperStore)) {
167 satisfiable = true; 165 satisfiable = SatisfiabilityStatus.SATISFIABLE;
168 Utility.logInfo("time for satisfiability checking: " + t.duration()); 166 Utility.logInfo("time for satisfiability checking: " + t.duration());
169 } 167 }
170 168
@@ -176,7 +174,7 @@ public class MyQueryReasoner extends QueryReasoner {
176 predicatesWithGap = gap.getPredicatesWithGap(); 174 predicatesWithGap = gap.getPredicatesWithGap();
177 gap.clear(); 175 gap.clear();
178 176
179 if (program.getGeneral().isHorn()) 177 if(program.getGeneral().isHorn())
180 encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore); 178 encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore);
181 else 179 else
182 encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore); 180 encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore);
@@ -186,21 +184,22 @@ public class MyQueryReasoner extends QueryReasoner {
186 184
187 program.deleteABoxTurtleFile(); 185 program.deleteABoxTurtleFile();
188 186
189 if (!isConsistent()) 187 if(!isConsistent())
190 return false; 188 return false;
191 189
192 consistency.extractBottomFragment(); 190 consistency.extractBottomFragment();
193 consistency.dispose(); 191 consistency.dispose();
192
194 return true; 193 return true;
195 } 194 }
196 195
197 @Override 196 @Override
198 public boolean isConsistent() { 197 public boolean isConsistent() {
199 if (satisfiable == null) { 198 if(satisfiable == SatisfiabilityStatus.UNCHECKED) {
200 satisfiable = consistency.check(); 199 satisfiable = consistency.check() ? SatisfiabilityStatus.SATISFIABLE : SatisfiabilityStatus.UNSATISFIABLE;
201 Utility.logInfo("time for satisfiability checking: " + t.duration()); 200 Utility.logInfo("time for satisfiability checking: " + t.duration());
202 } 201 }
203 return satisfiable; 202 return satisfiable == SatisfiabilityStatus.SATISFIABLE ? true : false;
204 } 203 }
205 204
206 /** 205 /**
@@ -208,8 +207,8 @@ public class MyQueryReasoner extends QueryReasoner {
208 * */ 207 * */
209 private OWLOntology relevantPart(QueryRecord queryRecord) { 208 private OWLOntology relevantPart(QueryRecord queryRecord) {
210 AnswerTuples rlAnswer = null, elAnswer = null; 209 AnswerTuples rlAnswer = null, elAnswer = null;
211 210
212 t.reset(); 211 t.reset();
213 try { 212 try {
214 rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); 213 rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
215 Utility.logDebug(t.duration()); 214 Utility.logDebug(t.duration());
@@ -218,11 +217,11 @@ public class MyQueryReasoner extends QueryReasoner {
218 if (rlAnswer != null) rlAnswer.dispose(); 217 if (rlAnswer != null) rlAnswer.dispose();
219 } 218 }
220 queryRecord.addProcessingTime(Step.LowerBound, t.duration()); 219 queryRecord.addProcessingTime(Step.LowerBound, t.duration());
221 220
222 t.reset(); 221 t.reset();
223 BasicQueryEngine upperStore = queryRecord.isBottom() || lazyUpperStore == null ? trackingStore : lazyUpperStore; 222 BasicQueryEngine upperStore = queryRecord.isBottom() || lazyUpperStore == null ? trackingStore : lazyUpperStore;
224 223
225 String[] extendedQuery = queryRecord.getExtendedQueryText(); 224 String[] extendedQuery = queryRecord.getExtendedQueryText();
226 225
227 // TODO why the following??? 226 // TODO why the following???
228 queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); 227 queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables());
@@ -246,10 +245,10 @@ public class MyQueryReasoner extends QueryReasoner {
246 245
247 queryRecord.addProcessingTime(Step.UpperBound, t.duration()); 246 queryRecord.addProcessingTime(Step.UpperBound, t.duration());
248 if (queryRecord.processed()) { 247 if (queryRecord.processed()) {
249 queryRecord.setDifficulty(Step.UpperBound); 248 queryRecord.setDifficulty(Step.UpperBound);
250 return null; 249 return null;
251 } 250 }
252 251
253 t.reset(); 252 t.reset();
254 try { 253 try {
255 elAnswer = elLowerStore.evaluate(extendedQuery[0], queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers()); 254 elAnswer = elLowerStore.evaluate(extendedQuery[0], queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers());
@@ -261,48 +260,46 @@ public class MyQueryReasoner extends QueryReasoner {
261 queryRecord.addProcessingTime(Step.ELLowerBound, t.duration()); 260 queryRecord.addProcessingTime(Step.ELLowerBound, t.duration());
262 261
263 if (queryRecord.processed()) { 262 if (queryRecord.processed()) {
264 queryRecord.setDifficulty(Step.ELLowerBound); 263 queryRecord.setDifficulty(Step.ELLowerBound);
265 return null; 264 return null;
266 } 265 }
267 266
268 t.reset(); 267 t.reset();
269 268
270 QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord); 269 QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord);
271 270
272 OWLOntology knowledgebase; 271 OWLOntology knowledgebase;
273 t.reset(); 272 t.reset();
274// if (program.getGeneral().isHorn()) { 273// if (program.getGeneral().isHorn()) {
275// knowledgebase = tracker.extract(lazyUpperStore, consistency.getQueryRecords(), true); 274// knowledgebase = tracker.extract(lazyUpperStore, consistency.getQueryRecords(), true);
276// queryRecord.addProcessingTime(Step.Fragment, t.duration()); 275// queryRecord.addProcessingTime(Step.Fragment, t.duration());
277// return knowledgebase; 276// return knowledgebase;
278// } 277// }
279// else { 278// else {
280 knowledgebase = tracker.extract(trackingStore, consistency.getQueryRecords(), true); 279 knowledgebase = tracker.extract(trackingStore, consistency.getQueryRecords(), true);
281 queryRecord.addProcessingTime(Step.Fragment, t.duration()); 280 queryRecord.addProcessingTime(Step.Fragment, t.duration());
282// } 281// }
283 282
284 if (knowledgebase.isEmpty() || queryRecord.isBottom()) 283 if(knowledgebase.isEmpty() || queryRecord.isBottom())
285 return knowledgebase; 284 return knowledgebase;
286 285
287 if (program.getGeneral().isHorn()) return knowledgebase; 286 if(program.getGeneral().isHorn()) return knowledgebase;
288 287
289// t.reset(); 288// t.reset();
290// if (queryRecord.isHorn() && lazyUpperStore != null) { 289// if (queryRecord.isHorn() && lazyUpperStore != null) {
291//// knowledgebase = tracker.extract(lazyUpperStore, consistency.getQueryRecords(), true); 290//// knowledgebase = tracker.extract(lazyUpperStore, consistency.getQueryRecords(), true);
292// } else if (queryRecord.getArity() < 3) { 291// } else if (queryRecord.getArity() < 3) {
293// IterativeRefinement iterativeRefinement = new IterativeRefinement(queryRecord, tracker, trackingStore, consistency.getQueryRecords()); 292// IterativeRefinement iterativeRefinement = new IterativeRefinement(queryRecord, tracker, trackingStore, consistency.getQueryRecords());
294// knowledgebase = iterativeRefinement.extractWithFullABox(importedData.toString(), program.getUpperBottomStrategy()); 293// knowledgebase = iterativeRefinement.extractWithFullABox(importedData.toString(), program.getUpperBottomStrategy());
295// } 294// }
296// 295//
297// queryRecord.addProcessingTime(Step.FragmentRefinement, t.duration()); 296// queryRecord.addProcessingTime(Step.FragmentRefinement, t.duration());
298// 297//
299// if (knowledgebase == null) 298// if (knowledgebase == null)
300// queryRecord.setDifficulty(Step.FragmentRefinement); 299// queryRecord.setDifficulty(Step.FragmentRefinement);
301
302 return knowledgebase;
303 }
304 300
305// int counter = 0; 301 return knowledgebase;
302 }
306 303
307 private String toJsonKeyValuePair(String key, Object value) { 304 private String toJsonKeyValuePair(String key, Object value) {
308 HashMap<String, Object> map = new HashMap<>(); 305 HashMap<String, Object> map = new HashMap<>();
@@ -310,8 +307,10 @@ public class MyQueryReasoner extends QueryReasoner {
310 return QueryRecord.GsonCreator.getInstance().toJson(map); 307 return QueryRecord.GsonCreator.getInstance().toJson(map);
311 } 308 }
312 309
310// int counter = 0;
311
313 private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) { 312 private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) {
314 AnswerTuples rlAnswer = null; 313 AnswerTuples rlAnswer = null;
315 try { 314 try {
316 Utility.logDebug(queryText); 315 Utility.logDebug(queryText);
317 rlAnswer = upperStore.evaluate(queryText, answerVariables); 316 rlAnswer = upperStore.evaluate(queryText, answerVariables);
@@ -326,37 +325,38 @@ public class MyQueryReasoner extends QueryReasoner {
326 @Override 325 @Override
327 public void evaluate(QueryRecord queryRecord) { 326 public void evaluate(QueryRecord queryRecord) {
328 OWLOntology knowledgeBase = relevantPart(queryRecord); 327 OWLOntology knowledgeBase = relevantPart(queryRecord);
329 328
330 if (knowledgeBase == null) { 329 if(knowledgeBase == null) {
331 Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty()); 330 Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
332 return ; 331 return;
333 } 332 }
334 333
335 int aBoxCount = knowledgeBase.getABoxAxioms(true).size(); 334 int aBoxCount = knowledgeBase.getABoxAxioms(true).size();
336 Utility.logDebug("ABox axioms: " + aBoxCount + " TBox axioms: " + (knowledgeBase.getAxiomCount() - aBoxCount)); 335 Utility.logDebug("ABox axioms: " + aBoxCount + " TBox axioms: " + (knowledgeBase.getAxiomCount() - aBoxCount));
337// queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); 336// queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl");
338 337
339 Timer t = new Timer(); 338 Timer t = new Timer();
340 Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT()); 339 Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT());
341// int validNumber = 340// int validNumber =
342 summarisedChecker.check(queryRecord.getGapAnswers()); 341 summarisedChecker.check(queryRecord.getGapAnswers());
343 summarisedChecker.dispose(); 342 summarisedChecker.dispose();
344 Utility.logDebug("Total time for full reasoner: " + t.duration()); 343 Utility.logDebug("Total time for full reasoner: " + t.duration());
345// if (validNumber == 0) { 344// if (validNumber == 0) {
346 queryRecord.markAsProcessed(); 345 queryRecord.markAsProcessed();
347 Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty()); 346 Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
348// } 347// }
349 } 348 }
350 349
351 @Override 350 @Override
352 public void evaluateUpper(QueryRecord queryRecord) { 351 public void evaluateUpper(QueryRecord queryRecord) {
353 AnswerTuples rlAnswer = null; 352 AnswerTuples rlAnswer = null;
354 boolean useFull = queryRecord.isBottom() || lazyUpperStore == null; 353 boolean useFull = queryRecord.isBottom() || lazyUpperStore == null;
355 try { 354 try {
356 rlAnswer = (useFull ? trackingStore: lazyUpperStore).evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); 355 rlAnswer =
357 queryRecord.updateUpperBoundAnswers(rlAnswer, true); 356 (useFull ? trackingStore : lazyUpperStore).evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
357 queryRecord.updateUpperBoundAnswers(rlAnswer, true);
358 } finally { 358 } finally {
359 if (rlAnswer != null) rlAnswer.dispose(); 359 if(rlAnswer != null) rlAnswer.dispose();
360 } 360 }
361 } 361 }
362 362
@@ -370,4 +370,6 @@ public class MyQueryReasoner extends QueryReasoner {
370 super.dispose(); 370 super.dispose();
371 } 371 }
372 372
373 enum SatisfiabilityStatus {SATISFIABLE, UNSATISFIABLE, UNCHECKED}
374
373} 375}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
index dfe0c5f..d4f4596 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
@@ -19,24 +19,26 @@ import java.util.Collection;
19 19
20// TODO clean APIs 20// TODO clean APIs
21public abstract class QueryReasoner { 21public abstract class QueryReasoner {
22 22
23 public static final String ImportDataFileSeparator = ";";
24 private static final boolean DEFAULT_MULTI_STAGES = true;
25 private static final boolean DEFAULT_EQUALITIES = true;
26 public boolean fullReasoner = this instanceof MyQueryReasoner;
27 protected StringBuilder importedData = new StringBuilder();
23// protected boolean forSemFacet = false; 28// protected boolean forSemFacet = false;
24 Properties properties; 29 Properties properties;
30 BufferedWriter answerWriter = null;
31 private QueryManager m_queryManager = new QueryManager();
25 32
26 private static boolean defaultMultiStages = true;
27 private static boolean defaultEqualities = true;
28
29 public enum Type { Full, RLU, ELHOU }
30
31 public static QueryReasoner getInstance(Properties p) { 33 public static QueryReasoner getInstance(Properties p) {
32 OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath()); 34 OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath());
33 QueryReasoner pagoda = getInstance(ontology, p); 35 QueryReasoner pagoda = getInstance(ontology, p);
34 pagoda.properties = p; 36 pagoda.properties = p;
35 pagoda.loadOntology(ontology); 37 pagoda.loadOntology(ontology);
36 pagoda.importData(p.getDataPath()); 38 pagoda.importData(p.getDataPath());
37 if (pagoda.preprocess()) { 39 if (pagoda.preprocess()) {
38 Utility.logInfo("The ontology is consistent!"); 40 Utility.logInfo("The ontology is consistent!");
39 return pagoda; 41 return pagoda;
40 } 42 }
41 else { 43 else {
42 System.out.println("The ontology is inconsistent!"); 44 System.out.println("The ontology is inconsistent!");
@@ -44,60 +46,63 @@ public abstract class QueryReasoner {
44 return null; 46 return null;
45 } 47 }
46 } 48 }
47 49
48 public static QueryReasoner getInstance(OWLOntology o) { 50 public static QueryReasoner getInstance(OWLOntology o) {
49 QueryReasoner pagoda = getInstance(Type.Full, o, defaultMultiStages, defaultEqualities); 51 QueryReasoner pagoda = getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES);
50 pagoda.properties = new Properties(); 52 pagoda.properties = new Properties();
51 return pagoda; 53 return pagoda;
52 } 54 }
53 55
54 public void setToClassify(boolean flag) {
55 properties.setToClassify(flag);
56 }
57
58 public void setToCallHermiT(boolean flag) {
59 properties.setToCallHermiT(flag);
60 }
61
62 private static QueryReasoner getInstance(OWLOntology o, Properties p) { 56 private static QueryReasoner getInstance(OWLOntology o, Properties p) {
63 return getInstance(Type.Full, o, defaultMultiStages, defaultEqualities); 57 return getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES);
64 } 58 }
65 59
66 public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) { 60 public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) {
67// Utility.initialise(); 61// Utility.initialise();
68 QueryReasoner reasoner; 62 QueryReasoner reasoner;
69 if (OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner(); 63 if (OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner();
70 else if (OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner(); 64 else if (OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner();
71 else 65 else
72 switch (type) { 66 switch (type) {
73 case RLU: 67 case RLU:
74 reasoner = new RLUQueryReasoner(performMultiStages, considerEqualities); break; 68 reasoner = new RLUQueryReasoner(performMultiStages, considerEqualities);
75 case ELHOU: 69 break;
76 reasoner = new ELHOUQueryReasoner(performMultiStages, considerEqualities); break; 70 case ELHOU:
77 default: 71 reasoner = new ELHOUQueryReasoner(performMultiStages, considerEqualities);
78 reasoner = new MyQueryReasoner(performMultiStages, considerEqualities); 72 break;
73 default:
74 reasoner = new MyQueryReasoner(performMultiStages, considerEqualities);
79 } 75 }
80 return reasoner; 76 return reasoner;
77 }
78
79 public static QueryReasoner getHermiTReasoner(boolean toCheckSatisfiability) {
80 return new HermiTReasoner(toCheckSatisfiability);
81 }
82
83 public void setToClassify(boolean flag) {
84 properties.setToClassify(flag);
85 }
86
87 public void setToCallHermiT(boolean flag) {
88 properties.setToCallHermiT(flag);
81 } 89 }
82
83 public static final String ImportDataFileSeparator = ";";
84 protected StringBuilder importedData = new StringBuilder();
85 90
86 public void importData(String datafile) { 91 public void importData(String datafile) {
87 if (datafile != null && !datafile.equalsIgnoreCase("null")) 92 if (datafile != null && !datafile.equalsIgnoreCase("null"))
88 importData(datafile.split(ImportDataFileSeparator)); 93 importData(datafile.split(ImportDataFileSeparator));
89 } 94 }
90 95
91 public void importData(String[] datafiles) { 96 public void importData(String[] datafiles) {
92 if (datafiles != null) { 97 if (datafiles != null) {
93 for (String datafile: datafiles) { 98 for (String datafile: datafiles) {
94 File file = new File(datafile); 99 File file = new File(datafile);
95 if (file.exists()) { 100 if (file.exists()) {
96 if (file.isFile()) importDataFile(file); 101 if (file.isFile()) importDataFile(file);
97 else importDataDirectory(file); 102 else importDataDirectory(file);
98 } 103 }
99 else { 104 else {
100 Utility.logError("warning: file " + datafile + " doesn't exists."); 105 Utility.logError("warning: file " + datafile + " doesn't exists.");
101 } 106 }
102 } 107 }
103 } 108 }
@@ -115,80 +120,75 @@ public abstract class QueryReasoner {
115 datafile = file.getCanonicalPath(); 120 datafile = file.getCanonicalPath();
116 } catch (IOException e) { 121 } catch (IOException e) {
117 e.printStackTrace(); 122 e.printStackTrace();
118 return ; 123 return;
119 } 124 }
120 importDataFile(datafile); 125 importDataFile(datafile);
121 } 126 }
122 127
123 protected final void importDataFile(String datafile) { 128 protected final void importDataFile(String datafile) {
124 if (importedData.length() == 0) 129 if (importedData.length() == 0)
125 importedData.append(datafile); 130 importedData.append(datafile);
126 else 131 else
127 importedData.append(ImportDataFileSeparator).append(datafile); 132 importedData.append(ImportDataFileSeparator).append(datafile);
128 133
129 } 134 }
130
131 public abstract void loadOntology(OWLOntology ontology);
132
133 public abstract boolean preprocess();
134 135
135 public abstract boolean isConsistent(); 136 public abstract void loadOntology(OWLOntology ontology);
136 137
137 public boolean fullReasoner = this instanceof MyQueryReasoner; 138 public abstract boolean preprocess();
138 139
139 public abstract void evaluate(QueryRecord record); 140 public abstract boolean isConsistent();
140 141
141 public abstract void evaluateUpper(QueryRecord record); 142 public abstract void evaluate(QueryRecord record);
143
144 public abstract void evaluateUpper(QueryRecord record);
142 145
143 public AnswerTuples evaluate(String queryText, boolean forFacetGeneration) { 146 public AnswerTuples evaluate(String queryText, boolean forFacetGeneration) {
144 if (forFacetGeneration) { 147 if (forFacetGeneration) {
145 QueryRecord record = m_queryManager.create(queryText); 148 QueryRecord record = m_queryManager.create(queryText);
146 Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText); 149 Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText);
147 if (!record.processed()) 150 if(!record.processed())
148 evaluateUpper(record); 151 evaluateUpper(record);
149// AnswerTuples tuples = record.getUpperBoundAnswers(); 152// AnswerTuples tuples = record.getUpperBoundAnswers();
150// for (AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) { 153// for (AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) {
151// tuple = tuples.getTuple(); 154// tuple = tuples.getTuple();
152// if (tuple.toString().contains("NC")) 155// if (tuple.toString().contains("NC"))
153// System.out.println(tuple.toString()); 156// System.out.println(tuple.toString());
154// } 157// }
155 return record.getUpperBoundAnswers(); 158 return record.getUpperBoundAnswers();
156 } 159 } else
157 else 160 return evaluate(queryText);
158 return evaluate(queryText);
159 } 161 }
160 162
163// public void evaluate(Collection<QueryRecord> queryRecords) {
164// evaluate(queryRecords);
165// }
166
161 public AnswerTuples evaluate(String queryText) { 167 public AnswerTuples evaluate(String queryText) {
162 QueryRecord record = m_queryManager.create(queryText); 168 QueryRecord record = m_queryManager.create(queryText);
163 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); 169 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText);
164 if (!record.processed()) 170 if(!record.processed())
165 evaluate(record); 171 evaluate(record);
166 AnswerTuples answer = record.getAnswers(); 172 AnswerTuples answer = record.getAnswers();
167 record.dispose(); 173 record.dispose();
168 return answer; 174 return answer;
169 175
170 } 176 }
171 177
172 public void evaluate_shell(String queryText) { 178 public void evaluate_shell(String queryText) {
173 QueryRecord record = m_queryManager.create(queryText); 179 QueryRecord record = m_queryManager.create(queryText);
174 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); 180 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText);
175 if (!record.processed()) 181 if(!record.processed())
176 evaluate(record); 182 evaluate(record);
177 Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple()); 183 Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple());
178 record.dispose(); 184 record.dispose();
179 185
180 } 186 }
181 187
182// public void evaluate(Collection<QueryRecord> queryRecords) {
183// evaluate(queryRecords);
184// }
185
186 BufferedWriter answerWriter = null;
187
188 public void evaluate(Collection<QueryRecord> queryRecords) { 188 public void evaluate(Collection<QueryRecord> queryRecords) {
189 if (!isConsistent()) { 189 if (!isConsistent()) {
190 Utility.logDebug("The ontology and dataset is inconsistent."); 190 Utility.logDebug("The ontology and dataset is inconsistent.");
191 return ; 191 return;
192 } 192 }
193 193
194 if(properties.getAnswerPath() != null && answerWriter == null) { 194 if(properties.getAnswerPath() != null && answerWriter == null) {
@@ -199,21 +199,21 @@ public abstract class QueryReasoner {
199 e.printStackTrace(); 199 e.printStackTrace();
200 } 200 }
201 } 201 }
202 202
203 Timer t = new Timer(); 203 Timer t = new Timer();
204 Gson gson = QueryRecord.GsonCreator.getInstance(); 204 Gson gson = QueryRecord.GsonCreator.getInstance();
205 for (QueryRecord record: queryRecords) { 205 for (QueryRecord record: queryRecords) {
206// if (Integer.parseInt(record.getQueryID()) != 218) continue; 206// if (Integer.parseInt(record.getQueryID()) != 218) continue;
207 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", 207 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------",
208 record.getQueryText()); 208 record.getQueryText());
209 if (!record.processed()) { 209 if (!record.processed()) {
210 t.reset(); 210 t.reset();
211 if (!record.processed()) 211 if (!record.processed())
212 evaluate(record); 212 evaluate(record);
213 Utility.logInfo("Total time to answer this query: " + t.duration()); 213 Utility.logInfo("Total time to answer this query: " + t.duration());
214 if (!fullReasoner && !record.processed()) { 214 if (!fullReasoner && !record.processed()) {
215 Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds."); 215 Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds.");
216 continue; 216 continue;
217 } 217 }
218 } 218 }
219 record.outputAnswerStatistics(); 219 record.outputAnswerStatistics();
@@ -225,7 +225,7 @@ public abstract class QueryReasoner {
225// queryRecords.stream().forEach(record -> Utility.logDebug(gson.toJson(record))); 225// queryRecords.stream().forEach(record -> Utility.logDebug(gson.toJson(record)));
226 queryRecords.stream().forEach(record -> record.dispose()); 226 queryRecords.stream().forEach(record -> record.dispose());
227 } 227 }
228 228
229 public void dispose() { 229 public void dispose() {
230 if (answerWriter != null) { 230 if (answerWriter != null) {
231 try { 231 try {
@@ -235,17 +235,13 @@ public abstract class QueryReasoner {
235 } 235 }
236 } 236 }
237// Utility.cleanup(); 237// Utility.cleanup();
238 } 238 }
239
240 private QueryManager m_queryManager = new QueryManager();
241 239
242 public QueryManager getQueryManager() { 240 public QueryManager getQueryManager() {
243 return m_queryManager; 241 return m_queryManager;
244 } 242 }
245 243
246 244
247 public static QueryReasoner getHermiTReasoner(boolean toCheckSatisfiability) { 245 public enum Type {Full, RLU, ELHOU}
248 return new HermiTReasoner(toCheckSatisfiability);
249 }
250 246
251} 247}
diff --git a/test/resources/HeavyTests.xml b/test/resources/HeavyTests.xml
new file mode 100644
index 0000000..4a96553
--- /dev/null
+++ b/test/resources/HeavyTests.xml
@@ -0,0 +1,17 @@
1<?xml version="1.0" encoding="UTF-8"?>
2<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
3
4<suite name="HeavyTests">
5
6 <test name="heavy">
7 <groups>
8 <run>
9 <include name="heavy"/>
10 </run>
11 </groups>
12 <classes>
13 <class name="uk.ac.ox.cs.pagoda.global_tests.TestPagodaUOBM"/>
14 <class name="uk.ac.ox.cs.pagoda.global_tests.TestPagodaLUBM"/>
15 </classes>
16 </test>
17</suite> \ No newline at end of file
diff --git a/test/resources/LightTests.xml b/test/resources/LightTests.xml
new file mode 100644
index 0000000..dcac0dd
--- /dev/null
+++ b/test/resources/LightTests.xml
@@ -0,0 +1,17 @@
1<?xml version="1.0" encoding="UTF-8"?>
2<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
3
4<suite name="LightTests">
5 <test name="light">
6 <groups>
7 <run>
8 <include name="light"/>
9 </run>
10 </groups>
11
12 <classes>
13 <class name="uk.ac.ox.cs.pagoda.global_tests.TestPagodaUOBM"/>
14 <class name="uk.ac.ox.cs.pagoda.global_tests.TestPagodaLUBM"/>
15 </classes>
16 </test>
17</suite> \ No newline at end of file
diff --git a/test/uk/ac/ox/cs/hermit/HermitQueryReasoner.java b/test/uk/ac/ox/cs/hermit/HermitQueryReasoner.java
index f6246f8..008fcb2 100644
--- a/test/uk/ac/ox/cs/hermit/HermitQueryReasoner.java
+++ b/test/uk/ac/ox/cs/hermit/HermitQueryReasoner.java
@@ -1,43 +1,21 @@
1package uk.ac.ox.cs.hermit; 1package uk.ac.ox.cs.hermit;
2 2
3import java.io.File;
4import java.io.FileNotFoundException;
5import java.io.FileOutputStream;
6import java.io.IOException;
7import java.io.PrintStream;
8import java.util.HashSet;
9import java.util.Set;
10import java.util.concurrent.Callable;
11import java.util.concurrent.ExecutionException;
12import java.util.concurrent.ExecutorService;
13import java.util.concurrent.Executors;
14import java.util.concurrent.Future;
15import java.util.concurrent.TimeUnit;
16import java.util.concurrent.TimeoutException;
17
18import org.semanticweb.HermiT.Reasoner; 3import org.semanticweb.HermiT.Reasoner;
19import org.semanticweb.HermiT.model.Atom; 4import org.semanticweb.HermiT.model.Atom;
20import org.semanticweb.HermiT.model.AtomicRole; 5import org.semanticweb.HermiT.model.AtomicRole;
21import org.semanticweb.owlapi.model.IRI; 6import org.semanticweb.owlapi.model.*;
22import org.semanticweb.owlapi.model.OWLAxiom;
23import org.semanticweb.owlapi.model.OWLClassExpression;
24import org.semanticweb.owlapi.model.OWLDataFactory;
25import org.semanticweb.owlapi.model.OWLDatatype;
26import org.semanticweb.owlapi.model.OWLIndividual;
27import org.semanticweb.owlapi.model.OWLNamedIndividual;
28import org.semanticweb.owlapi.model.OWLObjectProperty;
29import org.semanticweb.owlapi.model.OWLOntology;
30import org.semanticweb.owlapi.model.OWLOntologyCreationException;
31import org.semanticweb.owlapi.model.OWLOntologyManager;
32import org.semanticweb.owlapi.model.OWLOntologyStorageException;
33import org.semanticweb.owlapi.reasoner.Node; 7import org.semanticweb.owlapi.reasoner.Node;
34
35import uk.ac.ox.cs.pagoda.owl.OWLHelper; 8import uk.ac.ox.cs.pagoda.owl.OWLHelper;
36import uk.ac.ox.cs.pagoda.owl.QueryRoller; 9import uk.ac.ox.cs.pagoda.owl.QueryRoller;
37import uk.ac.ox.cs.pagoda.query.QueryManager; 10import uk.ac.ox.cs.pagoda.query.QueryManager;
38import uk.ac.ox.cs.pagoda.query.QueryRecord; 11import uk.ac.ox.cs.pagoda.query.QueryRecord;
39import uk.ac.ox.cs.pagoda.util.Timer; 12import uk.ac.ox.cs.pagoda.util.Timer;
40 13
14import java.io.*;
15import java.util.HashSet;
16import java.util.Set;
17import java.util.concurrent.*;
18
41public class HermitQueryReasoner { 19public class HermitQueryReasoner {
42 20
43 public static void main(String... args) throws FileNotFoundException, OWLOntologyCreationException, OWLOntologyStorageException { 21 public static void main(String... args) throws FileNotFoundException, OWLOntologyCreationException, OWLOntologyStorageException {
@@ -48,11 +26,13 @@ public class HermitQueryReasoner {
48// args = new String[] {"/media/krr-nas-share/Yujiao/ontologies/npd/npd-all-minus-datatype.owl", "/media/krr-nas-share/Yujiao/ontologies/npd/data/npd-data-dump-minus-datatype-new.ttl", "/users/yzhou/ontologies/npd/queries/atomic.sparql"}; 26// args = new String[] {"/media/krr-nas-share/Yujiao/ontologies/npd/npd-all-minus-datatype.owl", "/media/krr-nas-share/Yujiao/ontologies/npd/data/npd-data-dump-minus-datatype-new.ttl", "/users/yzhou/ontologies/npd/queries/atomic.sparql"};
49// args = new String[] {"/media/krr-nas-share/Yujiao/ontologies/npd/npd-all.owl", "/media/krr-nas-share/Yujiao/ontologies/npd/data/npd-data-dump-processed.ttl", "/users/yzhou/ontologies/npd/queries/atomic.sparql"}; 27// args = new String[] {"/media/krr-nas-share/Yujiao/ontologies/npd/npd-all.owl", "/media/krr-nas-share/Yujiao/ontologies/npd/data/npd-data-dump-processed.ttl", "/users/yzhou/ontologies/npd/queries/atomic.sparql"};
50// args = new String[] {PagodaTester.dbpedia_tbox, PagodaTester.dbpedia_abox, PagodaTester.dbpedia_query}; 28// args = new String[] {PagodaTester.dbpedia_tbox, PagodaTester.dbpedia_abox, PagodaTester.dbpedia_query};
51// args = new String[] {"/users/yzhou/ontologies/test/unsatisfiable.owl", null, "/users/yzhou/ontologies/test/unsatisfiable_queries.sparql"}; 29// args = new String[] {"/users/yzhou/ontologies/answersCorrectness/unsatisfiable.owl", null, "/users/yzhou/ontologies/answersCorrectness/unsatisfiable_queries.sparql"};
52 30
53// args = new String[] {"/media/krr-nas-share/Yujiao/ontologies/bio2rdf/chembl/cco-processed-noDPR-noDPD.ttl", "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/chembl/graph sampling/sample_100.nt", "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/chembl/queries/atomic_one_filtered.sparql", "../test-share/results/chembl/hermit_1p"}; 31// args = new String[] {"/media/krr-nas-share/Yujiao/ontologies/bio2rdf/chembl/cco-processed-noDPR-noDPD.ttl", "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/chembl/graph sampling/sample_100.nt", "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/chembl/queries/atomic_one_filtered.sparql", "../answersCorrectness-share/results/chembl/hermit_1p"};
54 args = new String[] {"/users/yzhou/temp/uniprot_debug/core-processed-noDis.owl", "/users/yzhou/temp/uniprot_debug/sample_1_removed.nt", "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/uniprot/queries/atomic_one.sparql", "../test-share/results/uniprot/hermit_1p"}; } 32 args =
55// args = new String[] {"imported.owl", "", "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/uniprot/queries/atomic_one.sparql", "../test-share/results/uniprot/hermit_1p"}; } 33 new String[]{"/users/yzhou/temp/uniprot_debug/core-processed-noDis.owl", "/users/yzhou/temp/uniprot_debug/sample_1_removed.nt", "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/uniprot/queries/atomic_one.sparql", "../answersCorrectness-share/results/uniprot/hermit_1p"};
34 }
35// args = new String[] {"imported.owl", "", "/media/krr-nas-share/Yujiao/ontologies/bio2rdf/uniprot/queries/atomic_one.sparql", "../answersCorrectness-share/results/uniprot/hermit_1p"}; }
56 36
57 37
58 PrintStream ps = args.length < 4 ? null : new PrintStream(new File(args[3])); 38 PrintStream ps = args.length < 4 ? null : new PrintStream(new File(args[3]));
@@ -60,8 +40,8 @@ public class HermitQueryReasoner {
60 if (args[i] == null || args[i].equalsIgnoreCase("null")) args[i] = ""; 40 if (args[i] == null || args[i].equalsIgnoreCase("null")) args[i] = "";
61 System.out.println("Argument " + i + ": " + args[i]); 41 System.out.println("Argument " + i + ": " + args[i]);
62 } 42 }
63 43
64// PrintStream ps = null; // new PrintStream(new File("../test-share/results/reactome/ ")); 44// PrintStream ps = null; // new PrintStream(new File("../answersCorrectness-share/results/reactome/ "));
65 if (ps != null) System.setOut(ps); 45 if (ps != null) System.setOut(ps);
66 46
67 Timer t = new Timer(); 47 Timer t = new Timer();
diff --git a/test/uk/ac/ox/cs/hermit/JAIR_HermiT.java b/test/uk/ac/ox/cs/hermit/JAIR_HermiT.java
index a3264ba..72e7af8 100644
--- a/test/uk/ac/ox/cs/hermit/JAIR_HermiT.java
+++ b/test/uk/ac/ox/cs/hermit/JAIR_HermiT.java
@@ -11,8 +11,8 @@ public class JAIR_HermiT {
11 String[] args = new String[] { 11 String[] args = new String[] {
12 TestUtil.combinePaths(ontoDir, "lubm/univ-bench.owl"), 12 TestUtil.combinePaths(ontoDir, "lubm/univ-bench.owl"),
13 TestUtil.combinePaths(ontoDir, "lubm/data/lubm1_owl"), 13 TestUtil.combinePaths(ontoDir, "lubm/data/lubm1_owl"),
14 TestUtil.combinePaths(ontoDir, "lubm/queries/test.sparql") 14 TestUtil.combinePaths(ontoDir, "lubm/queries/answersCorrectness.sparql")
15// , "/home/yzhou/java-workspace/test-share/results_new/lubm1/hermit" 15// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/lubm1/hermit"
16 }; 16 };
17 HermitQueryReasoner.main(args); 17 HermitQueryReasoner.main(args);
18 } 18 }
@@ -24,7 +24,7 @@ public class JAIR_HermiT {
24 "/home/yzhou/backup/20141212/univ-bench-queries.owl", 24 "/home/yzhou/backup/20141212/univ-bench-queries.owl",
25 TestUtil.combinePaths(ontoDir, "lubm/data/lubm1_owl"), 25 TestUtil.combinePaths(ontoDir, "lubm/data/lubm1_owl"),
26 TestUtil.combinePaths(ontoDir, "lubm/queries/atomic_lubm.sparql") 26 TestUtil.combinePaths(ontoDir, "lubm/queries/atomic_lubm.sparql")
27// , "/home/yzhou/java-workspace/test-share/results_new/lubm1/hermit_rolledUp" 27// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/lubm1/hermit_rolledUp"
28 }; 28 };
29 HermitQueryReasoner.main(args); 29 HermitQueryReasoner.main(args);
30 } 30 }
@@ -37,7 +37,7 @@ public class JAIR_HermiT {
37 TestUtil.combinePaths(ontoDir, "uobm/data/uobm1_owl_withDeclaration"), 37 TestUtil.combinePaths(ontoDir, "uobm/data/uobm1_owl_withDeclaration"),
38 TestUtil.combinePaths(ontoDir, "uobm/queries/standard.sparql") 38 TestUtil.combinePaths(ontoDir, "uobm/queries/standard.sparql")
39// , "hermit_uobm1.out" 39// , "hermit_uobm1.out"
40// , "/home/yzhou/java-workspace/test-share/results_new/uobm1/hermit" 40// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uobm1/hermit"
41 }; 41 };
42 HermitQueryReasoner.main(args); 42 HermitQueryReasoner.main(args);
43 } 43 }
@@ -50,7 +50,7 @@ public class JAIR_HermiT {
50 TestUtil.combinePaths(ontoDir, "uobm/data/uobm1_owl_withDeclaration"), 50 TestUtil.combinePaths(ontoDir, "uobm/data/uobm1_owl_withDeclaration"),
51 TestUtil.combinePaths(ontoDir, "uobm/queries/atomic_uobm.sparql") 51 TestUtil.combinePaths(ontoDir, "uobm/queries/atomic_uobm.sparql")
52 , "hermit_uobm1_rolledUp.out" 52 , "hermit_uobm1_rolledUp.out"
53// , "/home/yzhou/java-workspace/test-share/results_new/uobm1/hermit_rolledUp" 53// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uobm1/hermit_rolledUp"
54 }; 54 };
55 HermitQueryReasoner.main(args); 55 HermitQueryReasoner.main(args);
56 } 56 }
@@ -75,7 +75,7 @@ public class JAIR_HermiT {
75 TestUtil.combinePaths(ontoDir, "npd/data/npd-data-dump-minus-datatype-new.ttl"), 75 TestUtil.combinePaths(ontoDir, "npd/data/npd-data-dump-minus-datatype-new.ttl"),
76 TestUtil.combinePaths(ontoDir, "npd/queries/atomic_ground.sparql") 76 TestUtil.combinePaths(ontoDir, "npd/queries/atomic_ground.sparql")
77 , "hermit_npd.out" 77 , "hermit_npd.out"
78// , "/home/yzhou/java-workspace/test-share/results_new/npd/hermit" 78// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/npd/hermit"
79 ); 79 );
80 } 80 }
81 81
@@ -86,7 +86,7 @@ public class JAIR_HermiT {
86 TestUtil.combinePaths(ontoDir, "dbpedia/integratedOntology-all-in-one-minus-datatype.owl"), 86 TestUtil.combinePaths(ontoDir, "dbpedia/integratedOntology-all-in-one-minus-datatype.owl"),
87 TestUtil.combinePaths(ontoDir, "dbpedia/data/dbpedia-minus-datatype-new.ttl"), 87 TestUtil.combinePaths(ontoDir, "dbpedia/data/dbpedia-minus-datatype-new.ttl"),
88 TestUtil.combinePaths(ontoDir, "dbpedia/queries/atomic_ground.sparql") 88 TestUtil.combinePaths(ontoDir, "dbpedia/queries/atomic_ground.sparql")
89 , "/home/yzhou/java-workspace/test-share/results_new/dbpedia/hermit" 89 , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/dbpedia/hermit"
90 ); 90 );
91 } 91 }
92 92
@@ -97,7 +97,7 @@ public class JAIR_HermiT {
97 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/biopax-level3-processed.owl"), 97 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/biopax-level3-processed.owl"),
98 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/graph sampling/reactome_sample_10.ttl"), 98 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/graph sampling/reactome_sample_10.ttl"),
99 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/queries/atomic_ground.sparql") 99 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/queries/atomic_ground.sparql")
100 , "/home/yzhou/java-workspace/test-share/results_new/reactome/hermit_10p" 100 , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/reactome/hermit_10p"
101 ); 101 );
102 } 102 }
103 103
@@ -110,7 +110,7 @@ public class JAIR_HermiT {
110 TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/graph sampling/sample_1.nt"), 110 TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/graph sampling/sample_1.nt"),
111 TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/queries/atomic_ground.sparql") 111 TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/queries/atomic_ground.sparql")
112 , "hermit_chembl.out" 112 , "hermit_chembl.out"
113// , "/home/yzhou/java-workspace/test-share/results_new/chembl/hermit_1p" 113// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/chembl/hermit_1p"
114 ); 114 );
115 } 115 }
116 116
@@ -122,7 +122,7 @@ public class JAIR_HermiT {
122 TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/graph sampling/sample_1.nt"), 122 TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/graph sampling/sample_1.nt"),
123 TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/queries/atomic_ground.sparql") 123 TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/queries/atomic_ground.sparql")
124 , "hermit_uniprot.out" 124 , "hermit_uniprot.out"
125// , "/home/yzhou/java-workspace/test-share/results_new/uniprot/hermit_1p" 125// , "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uniprot/hermit_1p"
126 ); 126 );
127 } 127 }
128 128
diff --git a/test/uk/ac/ox/cs/pagoda/global_tests/CheckAnswersOverDataset.java b/test/uk/ac/ox/cs/pagoda/global_tests/CheckAnswers.java
index 424afa2..14050ce 100644
--- a/test/uk/ac/ox/cs/pagoda/global_tests/CheckAnswersOverDataset.java
+++ b/test/uk/ac/ox/cs/pagoda/global_tests/CheckAnswers.java
@@ -3,37 +3,24 @@ package uk.ac.ox.cs.pagoda.global_tests;
3import com.google.gson.Gson; 3import com.google.gson.Gson;
4import com.google.gson.reflect.TypeToken; 4import com.google.gson.reflect.TypeToken;
5import org.testng.Assert; 5import org.testng.Assert;
6import uk.ac.ox.cs.pagoda.Pagoda;
7import uk.ac.ox.cs.pagoda.query.QueryRecord; 6import uk.ac.ox.cs.pagoda.query.QueryRecord;
8 7
9import java.io.BufferedReader; 8import java.io.BufferedReader;
10import java.io.File;
11import java.io.IOException; 9import java.io.IOException;
12import java.lang.reflect.Type; 10import java.lang.reflect.Type;
13import java.nio.file.Files; 11import java.nio.file.Files;
14import java.nio.file.Path; 12import java.nio.file.Path;
15import java.nio.file.Paths;
16import java.util.Set; 13import java.util.Set;
17 14
18/** 15/**
19 * Given an instance of Pagoda, it checks the returned answers. 16 * It provides auxiliary methods for checking answers.
20 * */ 17 * */
21public class CheckAnswersOverDataset { 18public class CheckAnswers {
22 19
23 public static void check(Pagoda pagoda, Path givenAnswers) { 20 private CheckAnswers() {
24 try {
25// Utility.setLogLevel(Level.DEBUG); // uncomment for outputting partial results
26 Path computedAnswers = Paths.get(File.createTempFile("answers", ".json").getAbsolutePath());
27 new File(computedAnswers.toString()).deleteOnExit();
28
29 pagoda.run();
30 assertSameContent(computedAnswers, givenAnswers);
31 } catch (IOException e) {
32 e.printStackTrace();
33 }
34 } 21 }
35 22
36 private static void assertSameContent(Path computedAnswersFile, Path givenAnswersFile) throws IOException { 23 public static void assertSameAnswers(Path computedAnswersFile, Path givenAnswersFile) throws IOException {
37 BufferedReader computedReader = Files.newBufferedReader(computedAnswersFile); 24 BufferedReader computedReader = Files.newBufferedReader(computedAnswersFile);
38 BufferedReader givenReader = Files.newBufferedReader(givenAnswersFile); 25 BufferedReader givenReader = Files.newBufferedReader(givenAnswersFile);
39 26
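For reference, a minimal sketch of how the reworked CheckAnswers utility is meant to be driven from a test: the caller creates the answer file, hands it to Pagoda through the builder, runs the reasoner, and only then compares against the expected answers. The builder methods and assertSameAnswers are the ones appearing in this diff; the file paths are placeholders.

import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;

import uk.ac.ox.cs.pagoda.Pagoda;
import uk.ac.ox.cs.pagoda.global_tests.CheckAnswers;

public class CheckAnswersSketch {
    public static void main(String... args) throws Exception {
        // Computed answers go to a throw-away JSON file owned by the caller.
        Path computedAnswers = Paths.get(File.createTempFile("answers", ".json").getAbsolutePath());
        new File(computedAnswers.toString()).deleteOnExit();

        Pagoda.builder()
              .ontology(Paths.get("/path/to/ontology.owl"))    // placeholder
              .data(Paths.get("/path/to/data.ttl"))            // placeholder
              .query(Paths.get("/path/to/queries.sparql"))     // placeholder
              .answer(computedAnswers)
              .classify(true)
              .hermit(true)
              .build()
              .run();

        // CheckAnswers now only compares two answer files; it no longer runs Pagoda itself.
        CheckAnswers.assertSameAnswers(computedAnswers, Paths.get("/path/to/expected.json"));
    }
}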
diff --git a/test/uk/ac/ox/cs/pagoda/global_tests/JAIR_PAGOdA.java b/test/uk/ac/ox/cs/pagoda/global_tests/JAIR_PAGOdA.java
index 0d77fdb..18f6cf9 100644
--- a/test/uk/ac/ox/cs/pagoda/global_tests/JAIR_PAGOdA.java
+++ b/test/uk/ac/ox/cs/pagoda/global_tests/JAIR_PAGOdA.java
@@ -8,16 +8,24 @@ import java.io.IOException;
8 8
9public class JAIR_PAGOdA { 9public class JAIR_PAGOdA {
10 10
11 public static void main(String... args) {
12 try {
13 new JAIR_PAGOdA().lubm1();
14 } catch(IOException e) {
15 e.printStackTrace();
16 }
17 }
18
11 @Test 19 @Test
12 public void lubm1() throws IOException { 20 public void lubm1() throws IOException {
13 String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); 21 String ontoDir = TestUtil.getConfig().getProperty("ontoDir");
14 String[] args = new String[] { 22 String[] args = new String[] {
15 TestUtil.combinePaths(ontoDir, "lubm/univ-bench.owl"), 23 TestUtil.combinePaths(ontoDir, "lubm/univ-bench.owl"),
16 TestUtil.combinePaths(ontoDir, "lubm/data/lubm1.ttl"), 24 TestUtil.combinePaths(ontoDir, "lubm/data/lubm1.ttl"),
17 TestUtil.combinePaths(ontoDir, "lubm/queries/test.sparql") 25 TestUtil.combinePaths(ontoDir, "lubm/queries/answersCorrectness.sparql")
18 }; 26 };
19 PagodaTester.main(args); 27 PagodaTester.main(args);
20 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/lubm1/pagoda"); 28 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/lubm1/pagoda");
21 } 29 }
22 30
23 @Test 31 @Test
@@ -29,7 +37,7 @@ public class JAIR_PAGOdA {
29 TestUtil.combinePaths(ontoDir, "lubm/queries/test_pellet.sparql") 37 TestUtil.combinePaths(ontoDir, "lubm/queries/test_pellet.sparql")
30 }; 38 };
31 PagodaTester.main(args); 39 PagodaTester.main(args);
32 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/lubm1/pagoda_conj"); 40 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/lubm1/pagoda_conj");
33 } 41 }
34 42
35 @Test 43 @Test
@@ -40,7 +48,7 @@ public class JAIR_PAGOdA {
40 TestUtil.combinePaths(ontoDir, "lubm/data/lubm1.ttl"), 48 TestUtil.combinePaths(ontoDir, "lubm/data/lubm1.ttl"),
41 TestUtil.combinePaths(ontoDir, "lubm/queries/atomic_lubm.sparql") 49 TestUtil.combinePaths(ontoDir, "lubm/queries/atomic_lubm.sparql")
42 ); 50 );
43 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/lubm1/pagoda_rolledUp"); 51 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/lubm1/pagoda_rolledUp");
44 } 52 }
45 53
46 @Test 54 @Test
@@ -52,7 +60,7 @@ public class JAIR_PAGOdA {
52 TestUtil.combinePaths(ontoDir, "uobm/queries/standard.sparql") 60 TestUtil.combinePaths(ontoDir, "uobm/queries/standard.sparql")
53 }; 61 };
54 PagodaTester.main(args); 62 PagodaTester.main(args);
55 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/uobm1/pagoda"); 63 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uobm1/pagoda");
56 } 64 }
57 65
58 @Test 66 @Test
@@ -64,7 +72,7 @@ public class JAIR_PAGOdA {
64 TestUtil.combinePaths(ontoDir, "uobm/queries/standard_pellet.sparql") 72 TestUtil.combinePaths(ontoDir, "uobm/queries/standard_pellet.sparql")
65 }; 73 };
66 PagodaTester.main(args); 74 PagodaTester.main(args);
67 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/uobm1/pagoda_conj"); 75 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uobm1/pagoda_conj");
68 } 76 }
69 77
70 @Test 78 @Test
@@ -76,7 +84,7 @@ public class JAIR_PAGOdA {
76 TestUtil.combinePaths(ontoDir, "uobm/queries/atomic_uobm.sparql") 84 TestUtil.combinePaths(ontoDir, "uobm/queries/atomic_uobm.sparql")
77 }; 85 };
78 PagodaTester.main(args); 86 PagodaTester.main(args);
79// TestUtil.copyFile(("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/uobm1/pagoda_rolledUp"); 87// TestUtil.copyFile(("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uobm1/pagoda_rolledUp");
80 } 88 }
81 89
82 @Test 90 @Test
@@ -88,7 +96,7 @@ public class JAIR_PAGOdA {
88 TestUtil.combinePaths(ontoDir, "fly/queries/fly_pellet.sparql") 96 TestUtil.combinePaths(ontoDir, "fly/queries/fly_pellet.sparql")
89 }; 97 };
90 PagodaTester.main(args); 98 PagodaTester.main(args);
91// TestUtil.copyFile(("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/fly/pagoda"); 99// TestUtil.copyFile(("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/fly/pagoda");
92 } 100 }
93 101
94 @Test 102 @Test
@@ -100,10 +108,9 @@ public class JAIR_PAGOdA {
100 TestUtil.combinePaths(ontoDir, "fly/queries/fly_pellet.sparql") 108 TestUtil.combinePaths(ontoDir, "fly/queries/fly_pellet.sparql")
101 }; 109 };
102 PagodaTester.main(args); 110 PagodaTester.main(args);
103 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/fly/pagoda_conj"); 111 TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/fly/pagoda_conj");
104 } 112 }
105 113
106
107 public void fly_rolledUp() { 114 public void fly_rolledUp() {
108 String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); 115 String ontoDir = TestUtil.getConfig().getProperty("ontoDir");
109 PagodaTester.main( 116 PagodaTester.main(
@@ -112,7 +119,7 @@ public class JAIR_PAGOdA {
112 null, 119 null,
113 TestUtil.combinePaths(ontoDir, "fly/queries/fly_atomic.sparql") 120 TestUtil.combinePaths(ontoDir, "fly/queries/fly_atomic.sparql")
114 ); 121 );
115// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/fly/pagoda_rolledUp"); 122// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/fly/pagoda_rolledUp");
116 } 123 }
117 124
118 public void dbpedia() { 125 public void dbpedia() {
@@ -124,7 +131,7 @@ public class JAIR_PAGOdA {
124 "dbpedia.ans" 131 "dbpedia.ans"
125 ); 132 );
126 133
127// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/dbpedia/pagoda"); 134// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/dbpedia/pagoda");
128 } 135 }
129 136
130 public void npd() { 137 public void npd() {
@@ -136,7 +143,7 @@ public class JAIR_PAGOdA {
136 , "npd.ans" 143 , "npd.ans"
137 ); 144 );
138 145
139// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/npd/pagoda"); 146// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/npd/pagoda");
140 } 147 }
141 148
142 public void reactome() throws IOException { 149 public void reactome() throws IOException {
@@ -151,7 +158,7 @@ public class JAIR_PAGOdA {
151 ); 158 );
152 TestUtil.copyFile("log4j.log", "output/jair/pagoda_reactome.example"); 159 TestUtil.copyFile("log4j.log", "output/jair/pagoda_reactome.example");
153 160
154// TestUtil.copyFile(("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/reactome/pagoda_10p"); 161// TestUtil.copyFile(("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/reactome/pagoda_10p");
155 } 162 }
156 163
157 public void chembl() throws IOException { 164 public void chembl() throws IOException {
@@ -164,7 +171,7 @@ public class JAIR_PAGOdA {
164 , "pagoda_chembl.ans" 171 , "pagoda_chembl.ans"
165 ); 172 );
166 TestUtil.copyFile("log4j.log", "output/jair/pagoda_chembl.example"); 173 TestUtil.copyFile("log4j.log", "output/jair/pagoda_chembl.example");
167// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/chembl/pagoda_1p"); 174// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/chembl/pagoda_1p");
168 } 175 }
169 176
170 public void uniprot() throws IOException { 177 public void uniprot() throws IOException {
@@ -178,16 +185,7 @@ public class JAIR_PAGOdA {
178 , "pagoda_uniprot.ans" 185 , "pagoda_uniprot.ans"
179 ); 186 );
180 TestUtil.copyFile("log4j.log", "output/jair/pagoda_uniprot.example"); 187 TestUtil.copyFile("log4j.log", "output/jair/pagoda_uniprot.example");
181// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/test-share/results_new/uniprot/pagoda_1p"); 188// TestUtil.copyFile("output/log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uniprot/pagoda_1p");
182 }
183
184
185 public static void main(String... args) {
186 try {
187 new JAIR_PAGOdA().lubm1();
188 } catch (IOException e) {
189 e.printStackTrace();
190 }
191 } 189 }
192 190
193} 191}
diff --git a/test/uk/ac/ox/cs/pagoda/global_tests/JAIR_Scalability.java b/test/uk/ac/ox/cs/pagoda/global_tests/JAIR_Scalability.java
index 687ffee..5feda35 100644
--- a/test/uk/ac/ox/cs/pagoda/global_tests/JAIR_Scalability.java
+++ b/test/uk/ac/ox/cs/pagoda/global_tests/JAIR_Scalability.java
@@ -9,13 +9,18 @@ import java.io.IOException;
9 9
10public class JAIR_Scalability { 10public class JAIR_Scalability {
11 11
12 private static final String date = "_0123"; 12 private static final String date = "_0123";
13 13
14 public static void main(String... args) throws IOException {
15 Properties.shellModeDefault = true;
16 new JAIR_Scalability().testUniProt(50, false);
17 }
18
14 @Test 19 @Test
15 public void reactome() throws IOException { 20 public void reactome() throws IOException {
16 testReactome(10, false); 21 testReactome(10, false);
17 } 22 }
18 23
19 @Test 24 @Test
20 public void chembl() throws IOException { 25 public void chembl() throws IOException {
21 testChEMBL(1, false); 26 testChEMBL(1, false);
@@ -31,15 +36,15 @@ public class JAIR_Scalability {
31 String[] args = new String[] { 36 String[] args = new String[] {
32 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/biopax-level3-processed.owl"), 37 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/biopax-level3-processed.owl"),
33 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/graph sampling/simplifed_sample_" + percentage + ".ttl"), 38 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/graph sampling/simplifed_sample_" + percentage + ".ttl"),
34 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/queries/test.sparql") 39 TestUtil.combinePaths(ontoDir, "bio2rdf/reactome/queries/answersCorrectness.sparql")
35 , "reactome.ans" 40 , "reactome.ans"
36 }; 41 };
37 if (percentage == 10) 42 if (percentage == 10)
38 args[1] = args[1].replace("simplifed", "reactome"); 43 args[1] = args[1].replace("simplifed", "reactome");
39 44
40 PagodaTester.main(args); 45 PagodaTester.main(args);
41 if (save) 46 if (save)
42 TestUtil.copyFile("log4j.log", "/home/yzhou/java-workspace/test-share/results_new/reactome/pagoda_" + percentage + "p" + date); 47 TestUtil.copyFile("log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/reactome/pagoda_" + percentage + "p" + date);
43 } 48 }
44 49
45 public void testChEMBL(int percentage, boolean save) throws IOException { 50 public void testChEMBL(int percentage, boolean save) throws IOException {
@@ -48,18 +53,18 @@ public class JAIR_Scalability {
48 TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/cco-noDPR.ttl"), 53 TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/cco-noDPR.ttl"),
49 TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/sample_" + percentage + ".nt"), 54 TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/sample_" + percentage + ".nt"),
50// TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/queries/atomic_ground.sparql") 55// TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/queries/atomic_ground.sparql")
51 TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/queries/test.sparql") 56 TestUtil.combinePaths(ontoDir, "bio2rdf/chembl/queries/answersCorrectness.sparql")
52 , "chembl.ans" 57 , "chembl.ans"
53 }; 58 };
54 if (percentage == 1 || percentage == 10 || percentage == 50) 59 if (percentage == 1 || percentage == 10 || percentage == 50)
55 args[1] = args[1].replace("chembl", "chembl/graph sampling"); 60 args[1] = args[1].replace("chembl", "chembl/graph sampling");
56 else 61 else
57 if (percentage == 100) 62 if (percentage == 100)
58 args[1] = "/home/yzhou/RDFData/ChEMBL/facts/ChEMBL.ttl"; 63 args[1] = "/home/yzhou/RDFData/ChEMBL/facts/ChEMBL.ttl";
59 64
60 PagodaTester.main(args); 65 PagodaTester.main(args);
61 if (save) 66 if (save)
62 TestUtil.copyFile("log4j.log", "/home/yzhou/java-workspace/test-share/results_new/chembl/pagoda_" + percentage + "p" + date); 67 TestUtil.copyFile("log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/chembl/pagoda_" + percentage + "p" + date);
63 } 68 }
64 69
65 public void testUniProt(int percentage, boolean save) throws IOException { 70 public void testUniProt(int percentage, boolean save) throws IOException {
@@ -68,24 +73,19 @@ public class JAIR_Scalability {
68 TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/core-sat-processed.owl"), 73 TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/core-sat-processed.owl"),
69 TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/sample_" + percentage + ".nt"), 74 TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/sample_" + percentage + ".nt"),
70// TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/queries/atomic_ground.sparql") 75// TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/queries/atomic_ground.sparql")
71 TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/queries/test.sparql") 76 TestUtil.combinePaths(ontoDir, "bio2rdf/uniprot/queries/answersCorrectness.sparql")
72 , "uniprot.ans" 77 , "uniprot.ans"
73 }; 78 };
74 79
75 if (percentage == 1 || percentage == 10 || percentage == 50) 80 if (percentage == 1 || percentage == 10 || percentage == 50)
76 args[1] = args[1].replace("uniprot", "uniprot/graph sampling"); 81 args[1] = args[1].replace("uniprot", "uniprot/graph sampling");
77 else 82 else
78 if (percentage == 100) 83 if (percentage == 100)
79 args[1] = "/home/yzhou/krr-nas-share/Yujiao/ontologies/bio2rdf/uniprot/data/uniprot_cleaned.nt"; 84 args[1] = "/home/yzhou/krr-nas-share/Yujiao/ontologies/bio2rdf/uniprot/data/uniprot_cleaned.nt";
80 85
81 PagodaTester.main(args); 86 PagodaTester.main(args);
82 if (save) 87 if (save)
83 TestUtil.copyFile("log4j.log", "/home/yzhou/java-workspace/test-share/results_new/uniprot/pagoda_" + percentage + "p" + date); 88 TestUtil.copyFile("log4j.log", "/home/yzhou/java-workspace/answersCorrectness-share/results_new/uniprot/pagoda_" + percentage + "p" + date);
84 }
85
86 public static void main(String... args) throws IOException {
87 Properties.shellModeDefault = true;
88 new JAIR_Scalability().testUniProt(50, false);
89 } 89 }
90 90
91} 91}
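A small standalone illustration, not part of the commit, of how testChEMBL above resolves its data path from the percentage argument: the sampled percentages are redirected into the "graph sampling" subdirectory by the string replacement, while 100% falls back to the full dump.

public class ChemblSamplePathDemo {
    public static void main(String[] args) {
        for (int percentage : new int[]{1, 10, 50, 100}) {
            String data = "bio2rdf/chembl/sample_" + percentage + ".nt";
            if (percentage == 1 || percentage == 10 || percentage == 50)
                data = data.replace("chembl", "chembl/graph sampling");
            else if (percentage == 100)
                data = "/home/yzhou/RDFData/ChEMBL/facts/ChEMBL.ttl"; // full dump, as in testChEMBL
            System.out.println(percentage + "% -> " + data);
        }
    }
}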
diff --git a/test/uk/ac/ox/cs/pagoda/global_tests/LightEvaluation.java b/test/uk/ac/ox/cs/pagoda/global_tests/LightEvaluation.java
index b073a26..7fc5da8 100644
--- a/test/uk/ac/ox/cs/pagoda/global_tests/LightEvaluation.java
+++ b/test/uk/ac/ox/cs/pagoda/global_tests/LightEvaluation.java
@@ -4,12 +4,13 @@ import org.testng.annotations.Test;
4 4
5import java.io.IOException; 5import java.io.IOException;
6 6
7@Deprecated
7public class LightEvaluation { 8public class LightEvaluation {
8 9
9 @Test 10 @Test
10 public void evaluation() throws IOException { 11 public void evaluation() throws IOException {
11 new TestPagodaUOBM().test(1); 12 new TestPagodaUOBM().answersCorrectness(1);
12 new TestPagodaLUBM().test(100); 13 new TestPagodaLUBM().answersCorrecntess(100);
13 new TestPagodaFLY().test(); 14 new TestPagodaFLY().test();
14 new TestPagodaDBPedia().test(); 15 new TestPagodaDBPedia().test();
15 new TestPagodaNPD().testNPDwithoutDataType(); 16 new TestPagodaNPD().testNPDwithoutDataType();
diff --git a/test/uk/ac/ox/cs/pagoda/global_tests/TestPagodaLUBM.java b/test/uk/ac/ox/cs/pagoda/global_tests/TestPagodaLUBM.java
index bb58681..8cbe022 100644
--- a/test/uk/ac/ox/cs/pagoda/global_tests/TestPagodaLUBM.java
+++ b/test/uk/ac/ox/cs/pagoda/global_tests/TestPagodaLUBM.java
@@ -4,42 +4,57 @@ import org.testng.annotations.Test;
4import uk.ac.ox.cs.pagoda.Pagoda; 4import uk.ac.ox.cs.pagoda.Pagoda;
5import uk.ac.ox.cs.pagoda.util.TestUtil; 5import uk.ac.ox.cs.pagoda.util.TestUtil;
6 6
7import java.io.File;
7import java.io.IOException; 8import java.io.IOException;
9import java.nio.file.Path;
8import java.nio.file.Paths; 10import java.nio.file.Paths;
9 11
10public class TestPagodaLUBM { 12public class TestPagodaLUBM {
11 13
12 public void test(int number) throws IOException { 14 /**
15 * Just execute on LUBM 100
16 */
17 public static void main(String... args) {
18 new TestPagodaLUBM().justExecute_100();
19 }
20
21 public void answersCorrectness(int number) throws IOException {
13 String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); 22 String ontoDir = TestUtil.getConfig().getProperty("ontoDir");
23 Path computedAnswers = Paths.get(File.createTempFile("answers", ".json").getAbsolutePath());
24 new File(computedAnswers.toString()).deleteOnExit();
25
14 Pagoda pagoda = Pagoda.builder() 26 Pagoda pagoda = Pagoda.builder()
15 .ontology(Paths.get(ontoDir, "lubm/univ-bench.owl")) 27 .ontology(Paths.get(ontoDir, "lubm/univ-bench.owl"))
16 .data(Paths.get(ontoDir, "lubm/data/lubm" + number + ".ttl")) 28 .data(Paths.get(ontoDir, "lubm/data/lubm" + number + ".ttl"))
17 .query(Paths.get(ontoDir, "lubm/queries/test.sparql")) 29 .query(Paths.get(ontoDir, "lubm/queries/test.sparql"))
30 .answer(computedAnswers)
18 .classify(true) 31 .classify(true)
19 .hermit(true) 32 .hermit(true)
20 .build(); 33 .build();
21 CheckAnswersOverDataset.check(pagoda, Paths.get(ontoDir, "lubm/lubm" + number + ".json")); 34 pagoda.run();
35
36 Path givenAnswers = Paths.get(ontoDir, "lubm/lubm" + number + ".json");
37 CheckAnswers.assertSameAnswers(computedAnswers, givenAnswers);
22 } 38 }
23 39
24 @Test 40 @Test(groups = {"light"})
25 public void test_1() throws IOException { 41 public void answersCorrectness_1() throws IOException {
26 test(1); 42 answersCorrectness(1);
27 } 43 }
28 44
45 /**
46 * Just execute on LUBM 100
47 * */
29 public void justExecute_100() { 48 public void justExecute_100() {
30 int number = 100; 49 int number = 100;
31 String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); 50 String ontoDir = TestUtil.getConfig().getProperty("ontoDir");
32 Pagoda pagoda = Pagoda.builder() 51 Pagoda pagoda = Pagoda.builder()
33 .ontology(Paths.get(ontoDir, "lubm/univ-bench.owl")) 52 .ontology(Paths.get(ontoDir, "lubm/univ-bench.owl"))
34 .data(Paths.get(ontoDir, "lubm/data/lubm" + number + ".ttl")) 53 .data(Paths.get(ontoDir, "lubm/data/lubm" + number + ".ttl"))
35 .query(Paths.get(ontoDir, "lubm/queries/test.sparql")) 54 .query(Paths.get(ontoDir, "lubm/queries/answersCorrectness.sparql"))
36 .classify(true) 55 .classify(true)
37 .hermit(true) 56 .hermit(true)
38 .build(); 57 .build();
39 pagoda.run(); 58 pagoda.run();
40 } 59 }
41
42 public static void main(String... args) {
43 new TestPagodaLUBM().justExecute_100();
44 }
45} 60}
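The correctness test above is now tagged with the TestNG group "light". A minimal sketch, assuming TestNG is on the classpath and the class sits next to the test sources, of selecting that group programmatically; the same selection can also be made from a testng.xml suite.

import org.testng.TestNG;

import uk.ac.ox.cs.pagoda.global_tests.TestPagodaLUBM;
import uk.ac.ox.cs.pagoda.global_tests.TestPagodaUOBM;

public class RunLightGroup {
    public static void main(String[] args) {
        TestNG testng = new TestNG();
        testng.setTestClasses(new Class[]{TestPagodaLUBM.class, TestPagodaUOBM.class});
        testng.setGroups("light");   // runs only methods annotated with @Test(groups = {"light"})
        testng.run();
    }
}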
diff --git a/test/uk/ac/ox/cs/pagoda/global_tests/TestPagodaUOBM.java b/test/uk/ac/ox/cs/pagoda/global_tests/TestPagodaUOBM.java
index 61235d3..23d79ab 100644
--- a/test/uk/ac/ox/cs/pagoda/global_tests/TestPagodaUOBM.java
+++ b/test/uk/ac/ox/cs/pagoda/global_tests/TestPagodaUOBM.java
@@ -6,48 +6,57 @@ import uk.ac.ox.cs.pagoda.Pagoda;
6import uk.ac.ox.cs.pagoda.tester.PagodaTester; 6import uk.ac.ox.cs.pagoda.tester.PagodaTester;
7import uk.ac.ox.cs.pagoda.util.TestUtil; 7import uk.ac.ox.cs.pagoda.util.TestUtil;
8 8
9import java.io.File;
9import java.io.IOException; 10import java.io.IOException;
11import java.nio.file.Path;
10import java.nio.file.Paths; 12import java.nio.file.Paths;
11 13
12import static uk.ac.ox.cs.pagoda.util.TestUtil.combinePaths; 14import static uk.ac.ox.cs.pagoda.util.TestUtil.combinePaths;
13 15
14public class TestPagodaUOBM { 16public class TestPagodaUOBM {
15 17
16 public void test(int number) throws IOException { 18
19 private static final int N_1 = 1;
20 private static final int N_2 = 10;
21
22 @DataProvider(name = "uobmNumbers")
23 public static Object[][] uobmNumbers() {
24 Integer[][] integers = new Integer[N_2 - N_1 + 1][1];
25 for(int i = 0; i < N_2 - N_1 + 1; i++)
26 integers[i][0] = N_1 + i;
27 return integers;
28 }
29
30 public void answersCorrectness(int number) throws IOException {
17 String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); 31 String ontoDir = TestUtil.getConfig().getProperty("ontoDir");
32 Path computedAnswers = Paths.get(File.createTempFile("answers", ".json").getAbsolutePath());
33 new File(computedAnswers.toString()).deleteOnExit();
34
18 Pagoda pagoda = Pagoda.builder() 35 Pagoda pagoda = Pagoda.builder()
19 .ontology(Paths.get(ontoDir, "uobm/univ-bench-dl.owl")) 36 .ontology(Paths.get(ontoDir, "uobm/univ-bench-dl.owl"))
20 .data(Paths.get(ontoDir, "uobm/data/uobm" + number + ".ttl")) 37 .data(Paths.get(ontoDir, "uobm/data/uobm" + number + ".ttl"))
21 .query(Paths.get(ontoDir, "uobm/queries/test.sparql")) 38 .query(Paths.get(ontoDir, "uobm/queries/test.sparql"))
39 .answer(computedAnswers)
22 .classify(true) 40 .classify(true)
23 .hermit(true) 41 .hermit(true)
24 .build(); 42 .build();
25 CheckAnswersOverDataset.check(pagoda, 43 pagoda.run();
26 Paths.get(ontoDir, "uobm/uobm" + number + ".json"));
27 }
28 44
29 @Test 45 String given_answers = "uobm/uobm" + number + ".json";
30 public void test_1() throws IOException { 46 CheckAnswers.assertSameAnswers(computedAnswers, Paths.get(ontoDir, given_answers));
31 test(1);
32 } 47 }
33 48
34 private static final int N_1 = 1; 49 @Test(groups = {"light"})
35 private static final int N_2 = 10; 50 public void answersCorrectness_1() throws IOException {
36 51 answersCorrectness(1);
37 @DataProvider(name = "uobmNumbers")
38 public static Object[][] uobmNumbers() {
39 Integer[][] integers = new Integer[N_2 - N_1 + 1][1];
40 for (int i = 0; i < N_2 - N_1 + 1; i++)
41 integers[i][0]= N_1 + i;
42 return integers;
43 } 52 }
44 53
45 @Test(dataProvider = "uobmNumbers") 54 @Test(groups = {"heavy"}, dataProvider = "uobmNumbers")
46 public void justExecute(int number) { 55 public void justExecute(int number) {
47 String ontoDir = TestUtil.getConfig().getProperty("ontoDir"); 56 String ontoDir = TestUtil.getConfig().getProperty("ontoDir");
48 PagodaTester.main(combinePaths(ontoDir, "uobm/univ-bench-dl.owl"), 57 PagodaTester.main(combinePaths(ontoDir, "uobm/univ-bench-dl.owl"),
49 combinePaths(ontoDir, "uobm/data/uobm" + number + ".ttl"), 58 combinePaths(ontoDir, "uobm/data/uobm" + number + ".ttl"),
50 combinePaths(ontoDir, "uobm/queries/test.sparql")); 59 combinePaths(ontoDir, "uobm/queries/answersCorrectness.sparql"));
51 } 60 }
52 61
53} 62}
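A standalone illustration, not part of the commit, of what the uobmNumbers data provider yields: one single-element row per UOBM size between N_1 and N_2, so TestNG invokes justExecute(1) through justExecute(10).

public class UobmNumbersDemo {
    public static void main(String[] args) {
        int n1 = 1, n2 = 10;                        // same bounds as N_1 and N_2 above
        Integer[][] rows = new Integer[n2 - n1 + 1][1];
        for (int i = 0; i < rows.length; i++)
            rows[i][0] = n1 + i;
        for (Integer[] row : rows)
            System.out.println("justExecute(" + row[0] + ")");
    }
}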
diff --git a/test/uk/ac/ox/cs/pagoda/tester/PagodaTester.java b/test/uk/ac/ox/cs/pagoda/tester/PagodaTester.java
index 7d9b49c..274946d 100644
--- a/test/uk/ac/ox/cs/pagoda/tester/PagodaTester.java
+++ b/test/uk/ac/ox/cs/pagoda/tester/PagodaTester.java
@@ -1,46 +1,318 @@
1package uk.ac.ox.cs.pagoda.tester; 1package uk.ac.ox.cs.pagoda.tester;
2 2
3import uk.ac.ox.cs.pagoda.query.AnswerTuple;
4import uk.ac.ox.cs.pagoda.query.AnswerTuples;
3import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner; 5import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner;
4import uk.ac.ox.cs.pagoda.util.Properties; 6import uk.ac.ox.cs.pagoda.util.Properties;
5import uk.ac.ox.cs.pagoda.util.Timer; 7import uk.ac.ox.cs.pagoda.util.Timer;
6import uk.ac.ox.cs.pagoda.util.Utility; 8import uk.ac.ox.cs.pagoda.util.Utility;
7 9
8// TODO clean it, or code another one 10import java.io.File;
11import java.io.FileNotFoundException;
12import java.io.IOException;
13import java.util.Scanner;
14
15@Deprecated
9public class PagodaTester { 16public class PagodaTester {
10 17
18 // public static final String onto_dir = "/media/RDFData/yzhou/";
19// public static final String onto_dir = "/users/yzhou/ontologies/";
20// public static final String onto_dir = "/home/scratch/yzhou/ontologies/";
21 public static final String onto_dir = "/home/alessandro/Big_files/Ontologies/";
22
23 public static final String fly = onto_dir + "fly/fly_anatomy_XP_with_GJ_FC_individuals.owl";
24 public static final String fly_query = onto_dir + "fly/queries/fly.sparql";
25
26 public static final String test_tbox = onto_dir + "smallExampleFromAna/dummy.owl";
27 public static final String test_abox = onto_dir + "smallExampleFromAna/initialABox.ttl";
28 public static final String test_query = onto_dir + "smallExampleFromAna/queries.dlog";
29
30 public static final int lubm_number = 1;
31 public static final String lubm_tbox = onto_dir + "lubm/univ-bench.owl";
32 public static final String lubm_abox = onto_dir + "lubm/data/lubm" + lubm_number + ".ttl";
33 public static final String lubm_abox_copy = onto_dir + "lubm/data/lubm" + lubm_number + " (copy).ttl";
34 public static final String lubm_query = onto_dir + "lubm/queries/test.sparql";
35 public static final String lubm_query6 = onto_dir + "lubm/queries/test_q6.sparql";
36 public static final String lubm_query20 = onto_dir + "lubm/queries/test_q16.sparql";
37
38 public static final int uobm_number = 1;
39 public static final String uobm_tbox = onto_dir + "uobm/univ-bench-dl.owl";
40 public static final String uobm_abox = onto_dir + "uobm/data/uobm" + uobm_number + ".ttl";
41 public static final String uobm_query = onto_dir + "uobm/queries/test.sparql";
42 public static final String uobm_query_temp = onto_dir + "uobm/queries/temp.sparql";
43 public static final String uobm_query2 = onto_dir + "uobm/queries/standard_q2.sparql";
44 public static final String uobm_query9 = onto_dir + "uobm/queries/standard_q9.sparql";
45 public static final String uobm_query11 = onto_dir + "uobm/queries/standard_q11.sparql";
46 public static final String uobm_query12 = onto_dir + "uobm/queries/standard_q12.sparql";
47 public static final String uobm_query14 = onto_dir + "uobm/queries/standard_q14.sparql";
48 public static final String uobm_query15 = onto_dir + "uobm/queries/standard_q15.sparql";
49 public static final String uobm_query_multi = onto_dir + "uobm/queries/standard_multi.sparql";
50 public static final String uobm_generated_query1 = onto_dir + "uobm/queries/generated_q1.sparql";
51 public static final String uobm_query_group3 = onto_dir + "uobm/queries/standard_group3.sparql";
52
53 public static final String npd_tbox = onto_dir + "npd/npd-all-minus-datatype.owl";
54 // "npd/npd-all.owl";
55 // "npd-all-minus-datatype.owl";
56 public static final String npd_abox = onto_dir + "npd/data/npd-data-dump-minus-datatype-new.ttl";
57 // "npd/data/npd-data-dump-processed.ttl";
58 // "npd-data-dump-minus-datatype-old.ttl";
59 public static final String npd_query = onto_dir + "npd/queries/atomic.sparql";
60
61 public static final String npd_bench_tbox = onto_dir + "npd-benchmark/npd-v2-ql_a.owl";
62 // npd-all-minus-datatype.owl";
63 public static final String npd_bench_abox = onto_dir + "npd-benchmark/npd-v2-ql_a.ttl";
64 // npd-data-dump-minus-datatype-old.ttl";
65 public static final String npd_bench_query = onto_dir + "npd-benchmark/queries/all.sparql";
66
67 public static final String dbpedia_tbox = onto_dir + "dbpedia/integratedOntology-all-in-one-minus-datatype.owl";
68 public static final String dbpedia_abox = onto_dir + "dbpedia/data/dbpedia-minus-datatype-new.ttl";
69 public static final String dbpedia_query = onto_dir + "dbpedia/queries/atomic_ground.sparql";
70 public static final String dbpedia_query274 = onto_dir + "dbpedia/atomic_q274.sparql";
71
72 public static final String dbpedia_latest_tbox = onto_dir + "dbpedia/dbpedia_2014.owl";
73 public static final String travel_tbox = onto_dir + "dbpedia/travel.owl";
74 public static final String dbpedia_tbox_simple = onto_dir + "dbpedia/dbpedia_simple.owl";
75
76 public static final String bioModels_tbox = onto_dir + "biomodels/biomodels-21.owl";
77 public static final String bioModels_abox = onto_dir + "biomodels/data_processed_1.ttl";
78 public static final String bioModels_queries = onto_dir + "biomodels/queries/queries.sparql";
79
80 public static final String chembl_tbox = onto_dir + "bio2rdf/chembl/cco-processed-noDPR-noDPD.ttl";
81 public static final String chembl_abox = onto_dir + "bio2rdf/chembl/graph sampling old/sample_100.nt";
82 public static final String chembl_queries = onto_dir + "bio2rdf/chembl/queries/problematic.sparql";
83 //"bio2rdf/chembl/queries/atomic_one_filtered.sparql"; //
84
85 public static final String reactome_tbox = onto_dir + "bio2rdf/reactome/biopax-level3-processed.owl";
86 public static final String reactome_abox = onto_dir + "bio2rdf/reactome/graph sampling old/sample.ttl";
87 //data/data.ttl"; //graph sampling old/reactome_sample_10.ttl"; //
88 public static final String reactome_queries = onto_dir + "bio2rdf/reactome/queries/atomic.sparql";
89
90 public static final String uniprot_tbox = onto_dir + "bio2rdf/uniprot/core-processed.owl";
91 public static final String uniprot_abox = onto_dir + "bio2rdf/uniprot/graph sampling/sample_1.nt";
92 public static final String uniprot_queries = onto_dir + "bio2rdf/uniprot/queries/atomic_one.sparql";
93
94 public static final String atlas_tbox = onto_dir + "bio2rdf/atlas/gxaterms.owl";
95 public static final String atlas_abox = onto_dir + "bio2rdf/atlas/graph sampling/sample_1.nt";
96 public static final String atlas_queries = onto_dir + "bio2rdf/atlas/queries/atomic_one.sparql";
97 QueryReasoner pagoda;
98
99 // private void printPredicatesWithGap() {
100// for (String p: ((MyQueryReasoner) pagoda).getPredicatesWithGap()) {
101// System.out.println(p);
102// }
103// }
104 Timer timer = new Timer();
105
106 public PagodaTester(QueryReasoner reasoner) {
107 pagoda = reasoner;
108 }
109
11 public static void main(String... args) { 110 public static void main(String... args) {
12// Properties properties = new Properties(PagodaTester.class. 111 if(args.length == 0) {
13// getClassLoader().getResource("uobm.properties").getPath()); 112// args = new String[] {test_tbox, test_abox, test_query};
14 Properties properties = new Properties(); 113// args = new String[] {lubm_tbox, lubm_abox, lubm_query};
15 114// args = new String[] {uobm_tbox, uobm_abox, uobm_query};
16 int index = 0; 115// args = new String[] {fly, "null", fly_query};
17 if (args.length > index) properties.setOntologyPath(args[index++]); 116// args = new String[] {dbpedia_tbox, dbpedia_abox, dbpedia_query};
18 if (args.length > index && (args[index].endsWith(".ttl") || args[index].endsWith(".nt"))) properties.setDataPath(args[index++]); 117// args = new String[] {travel_tbox, null, dbpedia_query274};
19 if (args.length > index && args[index].endsWith(".sparql")) properties.setQueryPath(args[index++]); 118 args = new String[]{fly, fly_query};
20 if (args.length > index && !args[index].startsWith("-")) properties.setAnswerPath(args[index++]); 119// args = new String[] {npd_tbox, npd_abox, npd_query};
21 if (args.length > index) properties.setToClassify(Boolean.parseBoolean(args[index++].substring(1))); 120// args = new String[] {npd_bench_tbox, npd_bench_abox, npd_bench_query};
22 if (args.length > index) properties.setToCallHermiT(Boolean.parseBoolean(args[index++].substring(1))); 121// args = new String[] {"../SemFacet/WebContent/WEB-INF/data/dbpedia.owl", "../SemFacet/WebContent/WEB-INF/data/dbpediaA.nt", null};
23 122// args = new String[] {"../core/WebContent/WEB-INF/data/fly.owl", "../core/WebContent/WEB-INF/data/fly-data.nt", null};
123// args = new String[] {"data/lubm/univ-bench.owl", "data/lubm/lubm1.ttl", "data/lubm/lubm.sparql", "lubm.ans"};
124// args = new String[] {"data/uobm/univ-bench-dl.owl", "data/uobm/uobm1.ttl", "data/uobm/uobm.sparql", "uobm.ans"};
125// args = new String[] {"data/fly/fly_anatomy_XP_with_GJ_FC_individuals.owl", "data/fly/fly.sparql", "fly.ans"};
126// args = new String[] {bioModels_tbox, bioModels_abox, bioModels_queries};
127// args = new String[] {chembl_tbox, chembl_abox, chembl_queries};
128// args = new String[] {reactome_tbox, reactome_abox, reactome_queries};
129// args = new String[] {reactome_tbox, "/users/yzhou/temp/reactome_debug.ttl", onto_dir +"bio2rdf/reactome/queries/atomic_one_q65.sparql"};
130// args = new String[] {uniprot_tbox.replace(".owl", "-noDis.owl"), "/users/yzhou/temp/uniprot_debug/sample_1_string.nt", uniprot_queries};
131// args = new String[] {uniprot_tbox.replace(".owl", "-noDis.owl"), uniprot_abox, uniprot_queries};
132// args = new String[] {atlas_tbox, atlas_abox, atlas_queries};
133// args = new String[] {onto_dir + "test/unsatisfiable.owl", null, onto_dir + "test/unsatisfiable_queries.sparql"};
134// args = new String[] {onto_dir + "test/jair-example.owl", null, onto_dir + "test/jair-example_query.sparql"};
135// args[2] = args[2].replace(".sparql", "_all_pagoda.sparql");
136// args[2] = args[2].replace(".sparql", "_pellet.sparql");
137 }
138
139 Properties properties = new Properties("config/uobm.properties");
140
141 int index = 0;
142 if(args.length > index) properties.setOntologyPath(args[index++]);
143 if(args.length > index && (args[index].endsWith(".ttl") || args[index].endsWith(".nt")))
144 properties.setDataPath(args[index++]);
145 if(args.length > index && args[index].endsWith(".sparql")) properties.setQueryPath(args[index++]);
146 if(args.length > index && !args[index].startsWith("-")) properties.setAnswerPath(args[index++]);
147 if(args.length > index) properties.setToClassify(Boolean.parseBoolean(args[index++].substring(1)));
148 if(args.length > index) properties.setToCallHermiT(Boolean.parseBoolean(args[index++].substring(1)));
149
24 Utility.logInfo("Ontology file: " + properties.getOntologyPath()); 150 Utility.logInfo("Ontology file: " + properties.getOntologyPath());
25 Utility.logInfo("Data files: " + properties.getDataPath()); 151 Utility.logInfo("Data files: " + properties.getDataPath());
26 Utility.logInfo("Query files: " + properties.getQueryPath()); 152 Utility.logInfo("Query files: " + properties.getQueryPath());
27 Utility.logInfo("Answer file: " + properties.getAnswerPath()); 153 Utility.logInfo("Answer file: " + properties.getAnswerPath());
28 154
29 QueryReasoner pagoda = null; 155 QueryReasoner pagoda = null;
30 156
31 try { 157 try {
32 Timer t = new Timer(); 158 Timer t = new Timer();
33 pagoda = QueryReasoner.getInstance(properties); 159 pagoda = QueryReasoner.getInstance(properties);
34 if (pagoda == null) return; 160 if (pagoda == null) return;
35 161
36 Utility.logInfo("Preprocessing Done in " + t.duration() + " seconds."); 162 Utility.logInfo("Preprocessing Done in " + t.duration() + " seconds.");
37 163
38 if (properties.getQueryPath() != null) 164 if (properties.getQueryPath() != null)
39 for (String queryFile: properties.getQueryPath().split(";")) 165 for (String queryFile: properties.getQueryPath().split(";"))
40 pagoda.evaluate(pagoda.getQueryManager().collectQueryRecords(queryFile)); 166 pagoda.evaluate(pagoda.getQueryManager().collectQueryRecords(queryFile));
167
168 if(properties.getShellMode())
169 try {
170 evaluateConsoleQuery(pagoda);
171 } catch(IOException e) {
172 e.printStackTrace();
173 }
41 } finally { 174 } finally {
42 if (pagoda != null) pagoda.dispose(); 175 if (pagoda != null) pagoda.dispose();
43 } 176 }
177
178// Utility.closeCurrentOut();
179
180 if(properties.getShellMode()) System.exit(0);
181 }
182
183 private static void evaluateConsoleQuery(QueryReasoner pagoda) throws IOException {
184 int ending = (int) '$', symbol;
185 while(true) {
186 Utility.logInfo("Input your query ending with $");
187 StringBuilder queryBuilder = new StringBuilder();
188 while((symbol = System.in.read()) != ending) {
189 queryBuilder.append((char) symbol);
190 }
191 System.in.read();
192 if(queryBuilder.length() == 0) return;
193 pagoda.evaluate_shell(queryBuilder.toString());
194 }
195 }
196
197 void testReactomeQueries() {
198 evaluate("select ?x where { ?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.biopax.org/release/biopax-level3.owl#DnaReference> . }");
199 evaluate("select ?y ?z where { <http://identifiers.org/ensembl/ENSG00000157557> ?y ?z . }");
200 evaluate("select ?y where { <http://identifiers.org/ensembl/ENSG00000157557> <http://www.biopax.org/release/biopax-level3.owl#name> ?y . }", true);
201
202 }
203
204 void testSemFacetQueries() {
205// try {
206// BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream("query.line")));
207// for (String line; (line = reader.readLine()) != null && !line.isEmpty(); )
208// evaluate(line, true);
209// reader.close();
210// } catch (FileNotFoundException e) {
211// e.printStackTrace();
212// } catch (IOException e) {
213// e.printStackTrace();
214// }
215 evaluate("select ?x ?z where { ?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?z }", true);
216 evaluate("select distinct ?y where { ?x ?y ?z }", true);
217 evaluate("select distinct ?z where { ?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?z }", true);
218 evaluate("select ?y ?z where { <http://www.reactome.org/biopax/46/49633#Protein3885> ?y ?z .}", true);
219 }
220
221 void testISGQueries() {
222 evaluate("select ?z where {<http://cs.ox.ac.uk/Evgeny_Kharlamov> <http://cs.ox.ac.uk/lat> ?z .}", false);
223 evaluate("select ?x where {?x <http://cs.ox.ac.uk/type> <http://cs.ox.ac.uk/person> .}", false);
224 }
225
226 void testSomeTravelQueries() {
227 evaluate("select ?y ?z where {<http://www.owl-ontologies.com/travel.owl#BlueMountains> ?y ?z. }", true);
228 evaluate("select ?x where {?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.owl-ontologies.com/travel.owl#RetireeDestination>. }");
229 evaluate("select ?x where {?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.owl-ontologies.com/travel.owl#BackpackersDestination>. }");
230 }
231
232 void testSomeFlyQueries() {
233 evaluate("select ?x where { ?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://purl.obolibrary.org/obo/FBbt_00005106> . }", false);
234
235 evaluate("select DISTINCT ?z where { ?x <http://purl.obolibrary.org/obo/FBbt#develops_from> ?any . ?any <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?z . ?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://purl.obolibrary.org/obo/FBbt_00067123> . } ", true);
236
237 evaluate("Select ?x where { ?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> "
238 + "<http://purl.obolibrary.org/obo/FBbt_00067123>. ?x "
239 + "<http://purl.obolibrary.org/obo/RO_0002131> ?any . ?any "
240 + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> "
241 + "<http://purl.obolibrary.org/obo/FBbt_00005140> . }", true);
242
243 evaluate("Select ?x where {?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> "
244 + "<http://purl.obolibrary.org/obo/FBbt_00067363> . ?x "
245 + "<http://purl.obolibrary.org/obo/RO_0002131> ?any . ?any "
246 + "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> "
247 + "<http://purl.obolibrary.org/obo/FBbt_00005140> . }", true);
248
249// evaluate("Select ?x where { "
250// + "?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://purl.obolibrary.org/obo/FBbt_00003660>. "
251// + "?x <http://purl.obolibrary.org/obo/FBbt#develops_from> ?any . "
252// + "?any <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://purl.obolibrary.org/obo/FBbt_00001446> . }", true);
253
254 evaluate("select DISTINCT ?z where { ?x <http://purl.obolibrary.org/obo/RO_0002110> ?any . "
255 + "?any <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?z . "
256 + "?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://purl.obolibrary.org/obo/FBbt_00007016> . } ", true);
257
258 evaluate("Select * where {"
259 + "<http://www.virtualflybrain.org/ontologies/individuals/VFB_00100607> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://purl.obolibrary.org/obo/FBbt_00007364>. "
260 + "<http://www.virtualflybrain.org/ontologies/individuals/VFB_00100607> <http://www.w3.org/2002/07/owl#sameAs> ?z }", true);
261
262 evaluate("SELECT DISTINCT ?x ?z WHERE {?x <http://www.w3.org/2002/07/owl#sameAs> ?z}", true);
263 evaluate("SELECT DISTINCT ?x ?z WHERE {?x <http://purl.obolibrary.org/obo/BFO_0000051> ?z}", true);
264
265 evaluate("select DISTINCT ?y where { ?x ?y ?z . "
266 + "?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://purl.obolibrary.org/obo/FBbt_00007364> }", true);
267
268 evaluateQueriesFromFile("/users/yzhou/Downloads/logs(1).log");
269 evaluateQueriesFromFile("/users/yzhou/Downloads/logs.log");
270
271 evaluate("SELECT DISTINCT ?x ?z WHERE {?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?z}", true);
272 evaluate("SELECT DISTINCT ?x ?z WHERE {?x <http://xmlns.com/foaf/0.1/depicts> ?z}", true);
273
274 evaluate("select ?x ?z where { ?x <http://www.w3.org/2002/07/owl#sameAs> ?z } ", true);
275 evaluate("select ?x ?z where { ?x <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?z } ", true);
276 }
277
278 public void evaluateQueriesFromFile(String fileName) {
279 Scanner scanner = null;
280 try {
281 scanner = new Scanner(new File(fileName));
282 String line;
283 while(scanner.hasNextLine()) {
284 line = scanner.nextLine();
285 if(line.startsWith("select"))
286 evaluate(line, true);
287 }
288 } catch(FileNotFoundException e) {
289 e.printStackTrace();
290 } finally {
291 if(scanner != null)
292 scanner.close();
293 }
294 }
295
296 private void evaluate(String query) {
297 evaluate(query, false);
298 }
299
300 private void evaluate(String query, boolean tag) {
301 timer.reset();
302 AnswerTuples tuples = pagoda.evaluate(query, tag);
303 int arity = tuples.getArity();
304 int count = 0;
305 for(AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) {
306 tuple = tuples.getTuple();
307 for(int i = 0; i < arity; ++i)
308 tuple.getGroundTerm(i).toString();
309// System.out.print(tuple.getGroundTerm(i).toString() + "\t");
310// System.out.println();
311 ++count;
312 }
313 tuples.dispose();
314 Utility.logInfo("The number of answers for this SemFacet query: " + count);
315 Utility.logInfo("Total time for this SemFacet query: " + timer.duration());
44 } 316 }
45 317
46} 318}
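A minimal sketch, with placeholder paths, of driving the deprecated tester programmatically in the argument order parsed by main() above: ontology, data (.ttl or .nt), queries (.sparql), answer file, then the two boolean switches whose leading character is stripped via substring(1). It assumes the config/uobm.properties file loaded by main() is available in the working directory.

public class PagodaTesterSketch {
    public static void main(String... ignored) {
        uk.ac.ox.cs.pagoda.tester.PagodaTester.main(
                "/path/to/univ-bench.owl",     // placeholder ontology path
                "/path/to/lubm1.ttl",          // placeholder data path (.ttl or .nt)
                "/path/to/queries.sparql",     // placeholder query path
                "answers.out",                 // answer file (must not start with '-')
                "-true",                       // -> properties.setToClassify(true)
                "-false");                     // -> properties.setToCallHermiT(false)
    }
}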
diff --git a/test/uk/ac/ox/cs/pagoda/tester/Statistics.java b/test/uk/ac/ox/cs/pagoda/tester/Statistics.java
index 71f1726..13d7f90 100644
--- a/test/uk/ac/ox/cs/pagoda/tester/Statistics.java
+++ b/test/uk/ac/ox/cs/pagoda/tester/Statistics.java
@@ -6,6 +6,7 @@ import java.util.Iterator;
6import java.util.LinkedList; 6import java.util.LinkedList;
7import java.util.Scanner; 7import java.util.Scanner;
8 8
9@Deprecated
9public class Statistics { 10public class Statistics {
10 11
11 double satCheckTime; 12 double satCheckTime;
diff --git a/test/uk/ac/ox/cs/pagoda/util/TestUtil.java b/test/uk/ac/ox/cs/pagoda/util/TestUtil.java
index 1802147..ad0d494 100644
--- a/test/uk/ac/ox/cs/pagoda/util/TestUtil.java
+++ b/test/uk/ac/ox/cs/pagoda/util/TestUtil.java
@@ -26,8 +26,7 @@ public class TestUtil {
26 26
27 public static Properties getConfig() { 27 public static Properties getConfig() {
28 if(!isConfigLoaded) { 28 if(!isConfigLoaded) {
29 try (InputStream in = TestUtil.class.getClassLoader() 29 try(InputStream in = TestUtil.class.getClassLoader().getResourceAsStream(CONFIG_FILE)) {
30 .getResourceAsStream(CONFIG_FILE)) {
31 config = new java.util.Properties(); 30 config = new java.util.Properties();
32 config.load(in); 31 config.load(in);
33 in.close(); 32 in.close();