Diffstat (limited to 'src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java')
-rw-r--r--  src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java  266
1 file changed, 0 insertions, 266 deletions
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
deleted file mode 100644
index e8daa3b..0000000
--- a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
+++ /dev/null
@@ -1,266 +0,0 @@
-package uk.ac.ox.cs.pagoda.reasoner;
-
-import com.google.gson.Gson;
-import org.semanticweb.owlapi.model.OWLOntology;
-import uk.ac.ox.cs.pagoda.owl.OWLHelper;
-import uk.ac.ox.cs.pagoda.query.AnswerTuples;
-import uk.ac.ox.cs.pagoda.query.QueryManager;
-import uk.ac.ox.cs.pagoda.query.QueryRecord;
-import uk.ac.ox.cs.pagoda.util.PagodaProperties;
-import uk.ac.ox.cs.pagoda.util.Timer;
-import uk.ac.ox.cs.pagoda.util.Utility;
-import uk.ac.ox.cs.pagoda.util.disposable.Disposable;
-import uk.ac.ox.cs.pagoda.util.disposable.DisposedException;
-
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.Collection;
-
-// TODO clean APIs
-public abstract class QueryReasoner extends Disposable {
-
-    public static final String ImportDataFileSeparator = ";";
-    private static final boolean DEFAULT_MULTI_STAGES = true;
-    private static final boolean DEFAULT_EQUALITIES = true;
-    public boolean fullReasoner = this instanceof MyQueryReasoner;
-    // protected boolean forSemFacet = false;
-    PagodaProperties properties;
-    BufferedWriter answerWriter = null;
-    private StringBuilder importedData = new StringBuilder();
-    private QueryManager m_queryManager = new QueryManager();
-
-    public static QueryReasoner getInstance(PagodaProperties p) {
-        OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath());
-        QueryReasoner pagoda = getInstance(ontology, p);
-        pagoda.properties = p;
-        pagoda.loadOntology(ontology);
-        pagoda.importData(p.getDataPath());
-        if(pagoda.preprocess()) {
-            Utility.logInfo("The ontology is consistent!");
-            return pagoda;
-        }
-        else {
-            System.out.println("The ontology is inconsistent!");
-            pagoda.dispose();
-            return null;
-        }
-    }
-
-    public static QueryReasoner getInstance(OWLOntology o) {
-        QueryReasoner pagoda = getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES);
-        pagoda.properties = new PagodaProperties();
-        return pagoda;
-    }
-
-    private static QueryReasoner getInstance(OWLOntology o, PagodaProperties p) {
-        return getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES);
-    }
-
-    public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) {
-//        Utility.initialise();
-        QueryReasoner reasoner;
-        if(OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner();
-        else if(OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner();
-        else
-            switch(type) {
-                case RLU:
-                    reasoner = new RLUQueryReasoner(performMultiStages, considerEqualities);
-                    break;
-                case ELHOU:
-                    reasoner = new ELHOUQueryReasoner(performMultiStages, considerEqualities);
-                    break;
-                default:
-                    reasoner = new MyQueryReasoner(performMultiStages, considerEqualities);
-            }
-        return reasoner;
-    }
-
-    public static QueryReasoner getHermiTReasoner(boolean toCheckSatisfiability) {
-        return new HermiTReasoner(toCheckSatisfiability);
-    }
-
-    public void setToClassify(boolean flag) {
-        if(isDisposed()) throw new DisposedException();
-        properties.setToClassify(flag);
-    }
-
-    public void setToCallHermiT(boolean flag) {
-        if(isDisposed()) throw new DisposedException();
-        properties.setToCallHermiT(flag);
-    }
-
-    public void importData(String datafile) {
-        if(isDisposed()) throw new DisposedException();
-        if(datafile != null && !datafile.equalsIgnoreCase("null"))
-            importData(datafile.split(ImportDataFileSeparator));
-    }
-
-    public void importData(String[] datafiles) {
-        if(isDisposed()) throw new DisposedException();
-        if(datafiles != null) {
-            for(String datafile : datafiles) {
-                File file = new File(datafile);
-                if(file.exists()) {
-                    if(file.isFile()) importDataFile(file);
-                    else importDataDirectory(file);
-                }
-                else {
-                    Utility.logError("warning: file " + datafile + " doesn't exist.");
-                }
-            }
-        }
-    }
-
-    public abstract void loadOntology(OWLOntology ontology);
-
-    public abstract boolean preprocess();
-
-    public abstract boolean isConsistent();
-
-    public abstract void evaluate(QueryRecord record);
-
-    public abstract void evaluateUpper(QueryRecord record);
-
-    public AnswerTuples evaluate(String queryText, boolean forFacetGeneration) {
-        if(isDisposed()) throw new DisposedException();
-        if(forFacetGeneration) {
-            QueryRecord record = m_queryManager.create(queryText);
-            Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText);
-            if(!record.isProcessed())
-                evaluateUpper(record);
-//            AnswerTuples tuples = record.getUpperBoundAnswers();
-//            for (AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) {
-//                tuple = tuples.getTuple();
-//                if (tuple.toString().contains("NC"))
-//                    System.out.println(tuple.toString());
-//            }
-            return record.getUpperBoundAnswers();
-        }
-        else
-            return evaluate(queryText);
-    }
-
-    public AnswerTuples evaluate(String queryText) {
-        if(isDisposed()) throw new DisposedException();
-        QueryRecord record = m_queryManager.create(queryText);
-        Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText);
-        if(!record.isProcessed())
-            evaluate(record);
-        AnswerTuples answer = record.getAnswers();
-        record.dispose();
-        return answer;
-
-    }
-
-    public void evaluate_shell(String queryText) {
-        if(isDisposed()) throw new DisposedException();
-        QueryRecord record = m_queryManager.create(queryText);
-        Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText);
-        if(!record.isProcessed())
-            evaluate(record);
-        Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple());
-        record.dispose();
-
-    }
-
-    public void evaluate(Collection<QueryRecord> queryRecords) {
-        if(isDisposed()) throw new DisposedException();
-        if(!isConsistent()) {
-            Utility.logDebug("The ontology and dataset are inconsistent.");
-            return;
-        }
-
-        if(properties.getAnswerPath() != null && answerWriter == null) {
-            try {
-                answerWriter = Files.newBufferedWriter(Paths.get(properties.getAnswerPath()));
-            } catch(IOException e) {
-                Utility.logError("The answer path is not valid!");
-                e.printStackTrace();
-            }
-        }
-
-        Timer t = new Timer();
-        Gson gson = QueryRecord.GsonCreator.getInstance();
-        for(QueryRecord record : queryRecords) {
-//            if (Integer.parseInt(record.getQueryID()) != 218) continue;
-            Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------",
-                    record.getQueryText());
-            if(!record.isProcessed()) {
-                t.reset();
-                if(!record.isProcessed())
-                    evaluate(record);
-                Utility.logInfo("Total time to answer this query: " + t.duration());
-                Utility.logInfo("Difficulty of this query: " + record.getDifficulty());
-                if(!fullReasoner && !record.isProcessed()) {
-                    Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds.");
-                    continue;
-                }
-            }
-            record.outputAnswerStatistics();
-            record.outputTimes();
-        }
-        /* TODO it can handle one call only
-           if you call twice, you will end up with a json file with multiple roots */
-        if(answerWriter != null) gson.toJson(queryRecords, answerWriter);
-//        queryRecords.stream().forEach(record -> Utility.logDebug(gson.toJson(record)));
-        queryRecords.stream().forEach(QueryRecord::dispose);
-    }
-
-    @Override
-    public void dispose() {
-        super.dispose();
-        if(answerWriter != null) {
-            try {
-                answerWriter.close();
-            } catch(IOException e) {
-                e.printStackTrace();
-            }
-        }
-//        Utility.cleanup();
-    }
-
-//    public void evaluate(Collection<QueryRecord> queryRecords) {
-//        evaluate(queryRecords);
-//    }
-
-    public QueryManager getQueryManager() {
-        if(isDisposed()) throw new DisposedException();
-        return m_queryManager;
-    }
-
-    protected String getImportedData() {
-        return importedData.toString();
-    }
-
-    private void importDataDirectory(File file) {
-        for(File child : file.listFiles())
-            if(child.isFile()) importDataFile(child);
-            else importDataDirectory(child);
-    }
-
-    private void importDataFile(File file) {
-        String datafile;
-        try {
-            datafile = file.getCanonicalPath();
-        } catch(IOException e) {
-            e.printStackTrace();
-            return;
-        }
-        importDataFile(datafile);
-    }
-
-    protected final void importDataFile(String datafile) {
-        if(importedData.length() == 0)
-            importedData.append(datafile);
-        else
-            importedData.append(ImportDataFileSeparator).append(datafile);
-
-    }
-
-
-    public enum Type {Full, RLU, ELHOU}
-
-}
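
For orientation, below is a minimal usage sketch of the class removed by this commit, assembled only from members visible in the diff above (getInstance(OWLOntology), loadOntology, importData, preprocess, evaluate, dispose) and mirroring the call sequence that getInstance(PagodaProperties) performed internally. The class name, file paths, and query string are placeholders, and the answer-iteration calls (isValid/moveNext/getTuple) are taken from the commented-out loop in evaluate(String, boolean); treat this as a hedged sketch, not documented project API.

    import org.semanticweb.owlapi.model.OWLOntology;
    import uk.ac.ox.cs.pagoda.owl.OWLHelper;
    import uk.ac.ox.cs.pagoda.query.AnswerTuples;
    import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner;

    public class QueryReasonerSketch {
        public static void main(String[] args) {
            // Load the ontology and obtain a reasoner; getInstance(OWLOntology) selects the
            // RL, ELHO, or full reasoner internally depending on the ontology's profile.
            OWLOntology ontology = OWLHelper.loadOntology("ontology.owl");   // placeholder path
            QueryReasoner reasoner = QueryReasoner.getInstance(ontology);
            reasoner.loadOntology(ontology);
            reasoner.importData("data.ttl");                                 // placeholder; a ';'-separated list is also accepted
            if (reasoner.preprocess()) {                                     // false would mean the ontology is inconsistent
                AnswerTuples answers = reasoner.evaluate("SELECT ?x WHERE { ?x ?p ?o }"); // placeholder query
                for (; answers.isValid(); answers.moveNext()) {
                    System.out.println(answers.getTuple());
                }
            }
            reasoner.dispose();                                              // also closes the answer writer, if one was opened
        }
    }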