aboutsummaryrefslogtreecommitdiff
path: root/src/uk/ac/ox/cs/pagoda
diff options
context:
space:
mode:
Diffstat (limited to 'src/uk/ac/ox/cs/pagoda')
-rw-r--r--src/uk/ac/ox/cs/pagoda/Pagoda.java187
-rw-r--r--src/uk/ac/ox/cs/pagoda/approx/RLPlusOntology.java307
-rw-r--r--src/uk/ac/ox/cs/pagoda/constraints/PredicateDependency.java138
-rw-r--r--src/uk/ac/ox/cs/pagoda/endomorph/Clique.java4
-rw-r--r--src/uk/ac/ox/cs/pagoda/endomorph/plan/PlainPlan.java4
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication.java8
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication2.java51
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/IndividualCollector.java70
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java16
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java46
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/MultiStageUpperProgram.java429
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/Normalisation.java380
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication.java120
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication2.java177
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/StageQueryEngine.java27
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/TwoStageApplication.java265
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/TwoStageQueryEngine.java103
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/Violation.java15
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/ViolationTuple.java19
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java112
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptQuerySpecific.java20
-rw-r--r--src/uk/ac/ox/cs/pagoda/multistage/treatement/SkolemTreatment.java4
-rw-r--r--src/uk/ac/ox/cs/pagoda/query/AnswerTuple.java163
-rw-r--r--src/uk/ac/ox/cs/pagoda/query/DeserializedQueryRecord.java9
-rw-r--r--src/uk/ac/ox/cs/pagoda/query/GapByTriple.java35
-rw-r--r--src/uk/ac/ox/cs/pagoda/query/GapTupleIterator.java4
-rw-r--r--src/uk/ac/ox/cs/pagoda/query/QueryRecord.java713
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java156
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java78
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java9
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java78
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java37
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java26
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java412
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java245
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java19
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java36
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java6
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java26
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java106
-rw-r--r--src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java81
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/ApproxProgram.java31
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/Approximator.java62
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/DisjunctiveProgram.java4
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/ExistConstantApproximator.java26
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/ExistentialProgram.java4
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/ExistentialToDisjunctive.java16
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/LowerDatalogProgram.java56
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/Program.java38
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/UpperDatalogProgram.java9
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/approximators/Approximator.java42
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java150
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxBoth.java (renamed from src/uk/ac/ox/cs/pagoda/rules/OverApproxBoth.java)8
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxDisj.java (renamed from src/uk/ac/ox/cs/pagoda/rules/OverApproxDisj.java)27
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxExist.java (renamed from src/uk/ac/ox/cs/pagoda/rules/OverApproxExist.java)139
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java118
-rw-r--r--src/uk/ac/ox/cs/pagoda/rules/approximators/TupleDependentApproximator.java19
-rw-r--r--src/uk/ac/ox/cs/pagoda/summary/HermitSummaryFilter.java228
-rw-r--r--src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java38
-rw-r--r--src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj.java33
-rw-r--r--src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj1.java20
-rw-r--r--src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj2.java18
-rw-r--r--src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar1.java289
-rw-r--r--src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar2.java32
-rw-r--r--src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderWithGap.java41
-rw-r--r--src/uk/ac/ox/cs/pagoda/util/PagodaProperties.java126
-rw-r--r--src/uk/ac/ox/cs/pagoda/util/Properties.java66
-rw-r--r--src/uk/ac/ox/cs/pagoda/util/SparqlHelper.java49
-rw-r--r--src/uk/ac/ox/cs/pagoda/util/Utility.java145
-rw-r--r--src/uk/ac/ox/cs/pagoda/util/tuples/Tuple.java44
-rw-r--r--src/uk/ac/ox/cs/pagoda/util/tuples/TupleBuilder.java25
71 files changed, 3224 insertions, 3420 deletions
diff --git a/src/uk/ac/ox/cs/pagoda/Pagoda.java b/src/uk/ac/ox/cs/pagoda/Pagoda.java
new file mode 100644
index 0000000..aeb85a7
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/Pagoda.java
@@ -0,0 +1,187 @@
1package uk.ac.ox.cs.pagoda;
2
3import org.apache.commons.cli.*;
4import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner;
5import uk.ac.ox.cs.pagoda.util.PagodaProperties;
6import uk.ac.ox.cs.pagoda.util.Timer;
7import uk.ac.ox.cs.pagoda.util.Utility;
8
9import java.nio.file.Path;
10
11/**
12 * Executable command line user interface.
13 */
14public class Pagoda implements Runnable {
15
16 private static final String OPTION_ONTOLOGY = "o";
17 private static final String OPTION_DATA = "d";
18 private static final String OPTION_QUERY = "q";
19 private static final String OPTION_ANSWER = "a";
20 private static final String OPTION_CLASSIFY = "c";
21 private static final String OPTION_HERMIT = "f";
22 private final PagodaProperties properties;
23
24 /**
25 * Do not use it
26 * */
27 private Pagoda() {
28 properties = new PagodaProperties();
29 }
30
31 public static void main(String... args) {
32
33 // TODO treat the mandatory options as simple args
34 Options options = new Options();
35 options.addOption(Option.builder(OPTION_ONTOLOGY)
36 .argName(OPTION_ONTOLOGY)
37 .required()
38 .hasArg()
39 .desc("The ontology path")
40 .build());
41 options.addOption(Option.builder(OPTION_DATA).argName(OPTION_DATA).hasArg().desc("The data path").build());
42 options.addOption(Option.builder(OPTION_QUERY)
43 .argName(OPTION_QUERY)
44 .required()
45 .hasArg()
46 .desc("The query path")
47 .build());
48 options.addOption(Option.builder(OPTION_ANSWER)
49 .argName(OPTION_ANSWER)
50 .hasArg()
51 .desc("The answer path")
52 .build());
53 options.addOption(Option.builder(OPTION_CLASSIFY)
54 .argName(OPTION_CLASSIFY)
55 .desc("Tell whether to classify")
56 .type(Boolean.class)
57 .build());
58 options.addOption(Option.builder(OPTION_HERMIT)
59 .argName(OPTION_HERMIT)
60 .desc("Tell whether to call Hermit")
61 .type(Boolean.class)
62 .build());
63
64 CommandLineParser parser = new DefaultParser();
65 try {
66 CommandLine cmd = parser.parse(options, args);
67 PagodaBuilder pagodaBuilder = Pagoda.builder()
68 .ontology(cmd.getOptionValue(OPTION_ONTOLOGY))
69 .query(cmd.getOptionValue(OPTION_QUERY));
70 if(cmd.hasOption(OPTION_DATA)) pagodaBuilder.data(cmd.getOptionValue(OPTION_DATA));
71 if(cmd.hasOption(OPTION_ANSWER)) pagodaBuilder.answer(cmd.getOptionValue(OPTION_ANSWER));
72 if(cmd.hasOption(OPTION_CLASSIFY))
73 pagodaBuilder.classify(Boolean.parseBoolean(cmd.getOptionValue(OPTION_CLASSIFY)));
74 if(cmd.hasOption(OPTION_HERMIT))
75 pagodaBuilder.hermit(Boolean.parseBoolean(cmd.getOptionValue(OPTION_HERMIT)));
76
77 pagodaBuilder.build().run();
78 } catch(ParseException exp) {
79 HelpFormatter formatter = new HelpFormatter();
80 formatter.printHelp("PAGOdA", options);
81 Utility.logError("Parsing failed. Reason: " + exp.getMessage());
82 System.exit(0);
83 }
84 }
85
86 /**
87 * Get a builder.
88 * */
89 public static PagodaBuilder builder() {
90 return new PagodaBuilder();
91 }
92
93 @Override
94 public void run() {
95 Utility.logInfo("Ontology file: " + properties.getOntologyPath());
96 Utility.logInfo("Data files: " + properties.getDataPath());
97 Utility.logInfo("Query files: " + properties.getQueryPath());
98 Utility.logInfo("Answer file: " + properties.getAnswerPath());
99
100 QueryReasoner pagoda = null;
101
102 try {
103 Timer t = new Timer();
104 pagoda = QueryReasoner.getInstance(properties);
105 if (pagoda == null) return;
106
107 Utility.logInfo("Preprocessing Done in " + t.duration() + " seconds.");
108
109 if (properties.getQueryPath() != null)
110 for (String queryFile: properties.getQueryPath().split(";"))
111 pagoda.evaluate(pagoda.getQueryManager().collectQueryRecords(queryFile));
112 } finally {
113 if (pagoda != null) pagoda.dispose();
114 }
115 }
116
117 /**
118 * Allows to set the parameters before creating a Pagoda instance.
119 * */
120 public static class PagodaBuilder {
121
122 private Pagoda instance;
123
124 private PagodaBuilder() {
125 instance = new Pagoda();
126 }
127
128 public PagodaBuilder ontology(String ontologyPath) {
129 if(instance == null) return null;
130 instance.properties.setOntologyPath(ontologyPath);
131 return this;
132 }
133
134 public PagodaBuilder ontology(Path ontologyPath) {
135 return ontology(ontologyPath.toString());
136 }
137
138 public PagodaBuilder data(String dataPath) {
139 if(instance == null) return null;
140 instance.properties.setDataPath(dataPath);
141 return this;
142 }
143
144 public PagodaBuilder data(Path dataPath) {
145 return data(dataPath.toString());
146 }
147
148 public PagodaBuilder query(String queryPath) {
149 if(instance == null) return null;
150 instance.properties.setQueryPath(queryPath);
151 return this;
152 }
153
154 public PagodaBuilder query(Path queryPath) {
155 return query(queryPath.toString());
156 }
157
158 public PagodaBuilder answer(String answerPath) {
159 if(instance == null) return null;
160 instance.properties.setAnswerPath(answerPath);
161 return this;
162 }
163
164 public PagodaBuilder answer(Path answerPath) {
165 return answer(answerPath.toString());
166 }
167
168 public PagodaBuilder classify(Boolean toClassify) {
169 if(instance == null) return null;
170 instance.properties.setToClassify(toClassify);
171 return this;
172 }
173
174 public PagodaBuilder hermit(Boolean callHermit) {
175 if(instance == null) return null;
176 instance.properties.setToCallHermiT(callHermit);
177 return this;
178 }
179
180 public Pagoda build() {
181 Pagoda builtInstance = instance;
182 instance = null;
183 return builtInstance;
184 }
185
186 }
187}
diff --git a/src/uk/ac/ox/cs/pagoda/approx/RLPlusOntology.java b/src/uk/ac/ox/cs/pagoda/approx/RLPlusOntology.java
index 1ed8ba6..d961223 100644
--- a/src/uk/ac/ox/cs/pagoda/approx/RLPlusOntology.java
+++ b/src/uk/ac/ox/cs/pagoda/approx/RLPlusOntology.java
@@ -1,159 +1,121 @@
1package uk.ac.ox.cs.pagoda.approx; 1package uk.ac.ox.cs.pagoda.approx;
2 2
3import java.io.BufferedOutputStream; 3import org.apache.commons.io.FilenameUtils;
4import java.io.FileOutputStream;
5import java.io.IOException;
6import java.io.ObjectOutput;
7import java.io.ObjectOutputStream;
8import java.util.Collections;
9import java.util.HashMap;
10import java.util.HashSet;
11import java.util.Iterator;
12import java.util.LinkedList;
13import java.util.Map;
14import java.util.Random;
15import java.util.Set;
16
17import org.semanticweb.HermiT.Configuration; 4import org.semanticweb.HermiT.Configuration;
18import org.semanticweb.HermiT.model.DLClause; 5import org.semanticweb.HermiT.model.DLClause;
19import org.semanticweb.HermiT.model.DLOntology; 6import org.semanticweb.HermiT.model.DLOntology;
20import org.semanticweb.HermiT.structural.OWLClausification; 7import org.semanticweb.HermiT.structural.OWLClausification;
21import org.semanticweb.owlapi.model.IRI; 8import org.semanticweb.owlapi.model.*;
22import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom;
23import org.semanticweb.owlapi.model.OWLAxiom;
24import org.semanticweb.owlapi.model.OWLClass;
25import org.semanticweb.owlapi.model.OWLClassAssertionAxiom;
26import org.semanticweb.owlapi.model.OWLClassExpression;
27import org.semanticweb.owlapi.model.OWLDataFactory;
28import org.semanticweb.owlapi.model.OWLDataHasValue;
29import org.semanticweb.owlapi.model.OWLDataMaxCardinality;
30import org.semanticweb.owlapi.model.OWLDataMinCardinality;
31import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom;
32import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom;
33import org.semanticweb.owlapi.model.OWLDatatype;
34import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom;
35import org.semanticweb.owlapi.model.OWLIndividual;
36import org.semanticweb.owlapi.model.OWLNamedIndividual;
37import org.semanticweb.owlapi.model.OWLObjectAllValuesFrom;
38import org.semanticweb.owlapi.model.OWLObjectComplementOf;
39import org.semanticweb.owlapi.model.OWLObjectHasValue;
40import org.semanticweb.owlapi.model.OWLObjectMaxCardinality;
41import org.semanticweb.owlapi.model.OWLObjectMinCardinality;
42import org.semanticweb.owlapi.model.OWLObjectOneOf;
43import org.semanticweb.owlapi.model.OWLObjectProperty;
44import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom;
45import org.semanticweb.owlapi.model.OWLObjectPropertyExpression;
46import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom;
47import org.semanticweb.owlapi.model.OWLOntology;
48import org.semanticweb.owlapi.model.OWLOntologyCreationException;
49import org.semanticweb.owlapi.model.OWLOntologyManager;
50import org.semanticweb.owlapi.model.OWLOntologyStorageException;
51import org.semanticweb.owlapi.model.OWLSameIndividualAxiom;
52import org.semanticweb.owlapi.profiles.OWL2RLProfile; 9import org.semanticweb.owlapi.profiles.OWL2RLProfile;
53import org.semanticweb.owlapi.profiles.OWLProfileReport; 10import org.semanticweb.owlapi.profiles.OWLProfileReport;
54import org.semanticweb.owlapi.profiles.OWLProfileViolation; 11import org.semanticweb.owlapi.profiles.OWLProfileViolation;
55
56import uk.ac.ox.cs.pagoda.constraints.NullaryBottom; 12import uk.ac.ox.cs.pagoda.constraints.NullaryBottom;
57import uk.ac.ox.cs.pagoda.constraints.UnaryBottom; 13import uk.ac.ox.cs.pagoda.constraints.UnaryBottom;
58import uk.ac.ox.cs.pagoda.owl.OWLHelper; 14import uk.ac.ox.cs.pagoda.owl.OWLHelper;
59import uk.ac.ox.cs.pagoda.util.Namespace; 15import uk.ac.ox.cs.pagoda.util.Namespace;
60import uk.ac.ox.cs.pagoda.util.Utility; 16import uk.ac.ox.cs.pagoda.util.Utility;
61 17
18import java.io.*;
19import java.nio.file.Paths;
20import java.util.*;
21
62public class RLPlusOntology implements KnowledgeBase { 22public class RLPlusOntology implements KnowledgeBase {
63 23
24 private static final String DEFAULT_ONTOLOGY_FILE_EXTENSION = "owl";
64 OWLOntologyManager manager; 25 OWLOntologyManager manager;
65 OWLDataFactory factory; 26 OWLDataFactory factory;
66 String ontologyIRI; 27 String ontologyIRI;
67 String corrFileName = null; 28 String corrFileName = null;
68 String outputPath, aBoxPath; 29 String outputPath, aBoxPath;
69
70 OWLOntology inputOntology = null; 30 OWLOntology inputOntology = null;
71 OWLOntology tBox = null; 31 OWLOntology tBox = null;
72 OWLOntology aBox = null; 32 OWLOntology aBox = null;
73 OWLOntology restOntology = null; 33 OWLOntology restOntology = null;
74 OWLOntology outputOntology = null; //RL ontology 34 OWLOntology outputOntology = null; //RL ontology
75
76 DLOntology dlOntology = null; 35 DLOntology dlOntology = null;
77 int rlCounter = 0; 36 int rlCounter = 0;
78 37 LinkedList<Clause> clauses;
79 LinkedList<Clause> clauses;
80 Map<OWLAxiom, OWLAxiom> correspondence; 38 Map<OWLAxiom, OWLAxiom> correspondence;
81 39 BottomStrategy botStrategy;
82 BottomStrategy botStrategy; 40 Random random = new Random(19900114);
83 41 private Map<OWLClassExpression, Integer> subCounter = null;
42 private Map<OWLClass, OWLClass> atomic2negation = new HashMap<OWLClass, OWLClass>();
43
44 // FIXME multiple anonymous ontologies
84 @Override 45 @Override
85 public void load(OWLOntology o, uk.ac.ox.cs.pagoda.constraints.BottomStrategy bottomStrategy) { 46 public void load(OWLOntology ontology, uk.ac.ox.cs.pagoda.constraints.BottomStrategy bottomStrategy) {
86 if (bottomStrategy instanceof UnaryBottom) 47 if (bottomStrategy instanceof UnaryBottom)
87 botStrategy = BottomStrategy.UNARY; 48 botStrategy = BottomStrategy.UNARY;
88 else if (bottomStrategy instanceof NullaryBottom) 49 else if (bottomStrategy instanceof NullaryBottom)
89 botStrategy = BottomStrategy.NULLARY; 50 botStrategy = BottomStrategy.NULLARY;
90 else 51 else
91 botStrategy = BottomStrategy.TOREMOVE; 52 botStrategy = BottomStrategy.TOREMOVE;
92 53
93 if (corrFileName == null) 54 if(corrFileName == null)
94 corrFileName = "rlplus.crr"; 55 corrFileName = "rlplus.crr";
95 manager = o.getOWLOntologyManager(); 56 manager = ontology.getOWLOntologyManager();
96// manager = OWLManager.createOWLOntologyManager(); 57// manager = OWLManager.createOWLOntologyManager();
97 factory = manager.getOWLDataFactory(); 58 factory = manager.getOWLDataFactory();
98 inputOntology = o; 59 inputOntology = ontology;
99 60
100 try { 61 try {
101 String path = OWLHelper.getOntologyPath(inputOntology); 62 IRI ontologyIri;
102 String name; 63 if(ontology.isAnonymous()) {
103 if (path.contains(Utility.JAVA_FILE_SEPARATOR)) 64 String anonymousOntologySuffix = Long.toString(System.currentTimeMillis());
104 name = path.substring(path.lastIndexOf(Utility.JAVA_FILE_SEPARATOR)); 65 ontologyIri = IRI.create("http://www.example.org/", "anonymous-ontology-"
105 else 66 + anonymousOntologySuffix + "." + DEFAULT_ONTOLOGY_FILE_EXTENSION);
106 name = path.substring(path.lastIndexOf(":")); 67 }
107 68 else
108 String originalExtension = name.lastIndexOf(".") >= 0 ? name.substring(name.lastIndexOf(".")) : ""; 69 ontologyIri = inputOntology.getOntologyID().getOntologyIRI();
109 70
110 if (inputOntology.getOntologyID().getOntologyIRI() == null) 71 String ontologyIriPrefix = ontologyIri.getNamespace();
111 ontologyIRI = "http://www.example.org/anonymous-ontology" + originalExtension; 72 ontologyIRI = ontologyIri.toString();
112 else 73 String ontologyIriFragment = ontologyIri.getFragment();
113 ontologyIRI = inputOntology.getOntologyID().getOntologyIRI().toString(); 74 String originalFileName = FilenameUtils.removeExtension(ontologyIriFragment);
114 75 String originalExtension = FilenameUtils.getExtension(ontologyIriFragment);
115 String tOntoIRI = ontologyIRI; 76 if(originalExtension.isEmpty()) originalExtension = DEFAULT_ONTOLOGY_FILE_EXTENSION;
116 if (!tOntoIRI.endsWith(originalExtension)) tOntoIRI += originalExtension; 77
117 78
118 String rlOntologyIRI = originalExtension.isEmpty() ? tOntoIRI + "-RL.owl" : tOntoIRI.replaceFirst(originalExtension, "-RL.owl"); 79 IRI rlOntologyIRI = IRI.create(ontologyIriPrefix, originalFileName + "-RL." + originalExtension);
119 String rlDocumentIRI = (outputPath = Utility.TempDirectory + "RL.owl"); 80 outputPath = Paths.get(Utility.getGlobalTempDirAbsolutePath(),
120 outputOntology = manager.createOntology(IRI.create(rlOntologyIRI)); 81 originalFileName + "-RL." + originalExtension).toString();
121 manager.setOntologyDocumentIRI(outputOntology, IRI.create(Utility.toFileIRI(rlDocumentIRI))); 82 IRI rlDocumentIRI = IRI.create(outputPath);
122 83 outputOntology = manager.createOntology(rlOntologyIRI);
123 String tBoxOntologyIRI, aBoxOntologyIRI; 84 manager.setOntologyDocumentIRI(outputOntology, rlDocumentIRI);
124 tBoxOntologyIRI = originalExtension.isEmpty() ? tOntoIRI + "-TBox.owl" : tOntoIRI.replaceFirst(originalExtension, "-TBox.owl"); 85
125 aBoxOntologyIRI = originalExtension.isEmpty() ? tOntoIRI + "-ABox.owl" : tOntoIRI.replaceFirst(originalExtension, "-ABox.owl"); 86 String tBoxOntologyFragment = originalFileName + "-TBox." + originalExtension;
126 87 IRI tBoxOntologyIRI = IRI.create(ontologyIriPrefix, tBoxOntologyFragment);
127 String tBoxDocumentIRI = (Utility.TempDirectory + "TBox.owl"); 88 IRI tBoxDocumentIRI =
128 String aBoxDocumentIRI = (aBoxPath = Utility.TempDirectory + "ABox.owl"); 89 IRI.create("file://" + Paths.get(Utility.getGlobalTempDirAbsolutePath(), tBoxOntologyFragment));
129 tBox = manager.createOntology(IRI.create(tBoxOntologyIRI)); 90
130 aBox = manager.createOntology(IRI.create(aBoxOntologyIRI)); 91 String aBoxOntologyFragment = originalFileName + "-ABox." + originalExtension;
131 manager.setOntologyDocumentIRI(tBox, IRI.create(Utility.toFileIRI(tBoxDocumentIRI))); 92 IRI aBoxOntologyIRI = IRI.create(ontologyIriPrefix, aBoxOntologyFragment);
132 manager.setOntologyDocumentIRI(aBox, IRI.create(Utility.toFileIRI(aBoxDocumentIRI))); 93 aBoxPath = Paths.get(Utility.getGlobalTempDirAbsolutePath()) + aBoxOntologyFragment;
133 94 IRI aBoxDocumentIRI =
134 FileOutputStream aBoxOut = new FileOutputStream(aBoxPath); 95 IRI.create("file://" + Paths.get(Utility.getGlobalTempDirAbsolutePath(), aBoxOntologyFragment));
96
97 tBox = manager.createOntology(tBoxOntologyIRI);
98 aBox = manager.createOntology(aBoxOntologyIRI);
99 manager.setOntologyDocumentIRI(tBox, tBoxDocumentIRI);
100 manager.setOntologyDocumentIRI(aBox, aBoxDocumentIRI);
101
102 FileOutputStream aBoxOut = new FileOutputStream(aBoxPath);
135 manager.saveOntology(aBox, aBoxOut); 103 manager.saveOntology(aBox, aBoxOut);
136 aBoxOut.close(); 104 aBoxOut.close();
137 105
138 restOntology = manager.createOntology(); 106 restOntology = manager.createOntology();
139 } 107 } catch(OWLOntologyCreationException | OWLOntologyStorageException | IOException e) {
140 catch (OWLOntologyCreationException e) {
141 e.printStackTrace();
142 } catch (OWLOntologyStorageException e) {
143 // TODO Auto-generated catch block
144 e.printStackTrace();
145 } catch (IOException e) {
146 // TODO Auto-generated catch block
147 e.printStackTrace(); 108 e.printStackTrace();
109 System.exit(1);
148 } 110 }
149 } 111 }
150 112
151 public OWLOntology getTBox() { 113 public OWLOntology getTBox() {
152 return tBox; 114 return tBox;
153 } 115 }
154 116
155 public String getABoxPath() { 117 public String getABoxPath() {
156 return aBoxPath; 118 return aBoxPath;
157 } 119 }
158 120
159 private void add2SubCounter(OWLClassExpression exp) { 121 private void add2SubCounter(OWLClassExpression exp) {
@@ -164,24 +126,23 @@ public class RLPlusOntology implements KnowledgeBase {
164 } 126 }
165 127
166 public void simplify() { 128 public void simplify() {
167 if (simplifyABox()) { 129 if(simplifyABox()) {
168 save(aBox); 130 save(aBox);
169// save(tBox); 131// save(tBox);
170 } 132 } else
171 else 133 tBox = inputOntology;
172 tBox = inputOntology;
173 } 134 }
174 135
175 @Override 136 @Override
176 public void transform() { 137 public void transform() {
177 simplify(); 138 simplify();
178 filter(); 139 filter();
179 clausify(); 140 clausify();
180 141
181 subCounter = new HashMap<OWLClassExpression, Integer>(); 142 subCounter = new HashMap<OWLClassExpression, Integer>();
182 clauses = new LinkedList<Clause>(); 143 clauses = new LinkedList<Clause>();
183 Clausifier clausifier = Clausifier.getInstance(restOntology); 144 Clausifier clausifier = Clausifier.getInstance(restOntology);
184 145
185 for (DLClause c: dlOntology.getDLClauses()) { 146 for (DLClause c: dlOntology.getDLClauses()) {
186 Clause clause = new Clause(clausifier, c); 147 Clause clause = new Clause(clausifier, c);
187 clauses.add(clause); 148 clauses.add(clause);
@@ -190,7 +151,7 @@ public class RLPlusOntology implements KnowledgeBase {
190 * count the expressions in the left 151 * count the expressions in the left
191 */ 152 */
192 for (OWLClassExpression exp: clause.getSubClasses()) { 153 for (OWLClassExpression exp: clause.getSubClasses()) {
193 if (exp instanceof OWLClass) 154 if(exp instanceof OWLClass)
194 add2SubCounter(exp); 155 add2SubCounter(exp);
195 else if (exp instanceof OWLObjectSomeValuesFrom) { 156 else if (exp instanceof OWLObjectSomeValuesFrom) {
196 OWLObjectSomeValuesFrom someValue = (OWLObjectSomeValuesFrom)exp; 157 OWLObjectSomeValuesFrom someValue = (OWLObjectSomeValuesFrom)exp;
@@ -201,8 +162,7 @@ public class RLPlusOntology implements KnowledgeBase {
201 OWLObjectMinCardinality minCard = (OWLObjectMinCardinality)exp; 162 OWLObjectMinCardinality minCard = (OWLObjectMinCardinality)exp;
202 add2SubCounter(factory.getOWLObjectSomeValuesFrom(minCard.getProperty(), factory.getOWLThing())); 163 add2SubCounter(factory.getOWLObjectSomeValuesFrom(minCard.getProperty(), factory.getOWLThing()));
203 add2SubCounter(minCard.getFiller()); 164 add2SubCounter(minCard.getFiller());
204 } 165 } else
205 else
206 Utility.logError("strange class expression: " + exp); 166 Utility.logError("strange class expression: " + exp);
207 167
208 } 168 }
@@ -216,12 +176,12 @@ public class RLPlusOntology implements KnowledgeBase {
216 addedAxioms.clear(); 176 addedAxioms.clear();
217 for (OWLClassExpression exp: getDisjunctionApprox0(clause.getSuperClasses())) { 177 for (OWLClassExpression exp: getDisjunctionApprox0(clause.getSuperClasses())) {
218 addedAxioms.add(factory.getOWLSubClassOfAxiom(subExp, transform(exp, addedAxioms))); 178 addedAxioms.add(factory.getOWLSubClassOfAxiom(subExp, transform(exp, addedAxioms)));
219 for (OWLAxiom a: addedAxioms) 179 for(OWLAxiom a : addedAxioms)
220 addAxiom2output(a, factory.getOWLSubClassOfAxiom(subExp, 180 addAxiom2output(a, factory.getOWLSubClassOfAxiom(subExp,
221 OWLHelper.getSimplifiedDisjunction(factory, clause.getSuperClasses()))); 181 OWLHelper.getSimplifiedDisjunction(factory, clause.getSuperClasses())));
222 } 182 }
223 } 183 }
224 184
225 subCounter.clear(); 185 subCounter.clear();
226 } 186 }
227 187
@@ -231,7 +191,7 @@ public class RLPlusOntology implements KnowledgeBase {
231 save(correspondence, corrFileName); 191 save(correspondence, corrFileName);
232 save(outputOntology); 192 save(outputOntology);
233 } 193 }
234 194
235 private void save(Map<OWLAxiom, OWLAxiom> map, String corrFileName) { 195 private void save(Map<OWLAxiom, OWLAxiom> map, String corrFileName) {
236 if (corrFileName == null) return ; 196 if (corrFileName == null) return ;
237 ObjectOutput output; 197 ObjectOutput output;
@@ -251,63 +211,61 @@ public class RLPlusOntology implements KnowledgeBase {
251 e.printStackTrace(); 211 e.printStackTrace();
252 } 212 }
253 } 213 }
254 214
255 /* 215 /*
256 * treat disjunction as conjunction 216 * treat disjunction as conjunction
257 */ 217 */
258 private Set<OWLClassExpression> getDisjunctionApprox0(Set<OWLClassExpression> superClasses) { 218 private Set<OWLClassExpression> getDisjunctionApprox0(Set<OWLClassExpression> superClasses) {
259 return superClasses; 219 return superClasses;
260 } 220 }
261 221
262 /* 222 /*
263 * choose one simple class disjunct 223 * choose one simple class disjunct
264 */ 224 */
265 @SuppressWarnings("unused") 225 @SuppressWarnings("unused")
266 private Set<OWLClassExpression> getDisjunctionApprox1(Set<OWLClassExpression> superClasses) { 226 private Set<OWLClassExpression> getDisjunctionApprox1(Set<OWLClassExpression> superClasses) {
267 if (superClasses.isEmpty() || superClasses.size() == 1) 227 if(superClasses.isEmpty() || superClasses.size() == 1)
268 return superClasses; 228 return superClasses;
269 229
270 OWLClassExpression rep = null; 230 OWLClassExpression rep = null;
271 int min = Integer.MAX_VALUE, o; 231 int min = Integer.MAX_VALUE, o;
272 for (OWLClassExpression exp: superClasses) 232 for(OWLClassExpression exp : superClasses)
273 if (exp instanceof OWLClass && (o = getOccurrence(exp)) < min) { 233 if(exp instanceof OWLClass && (o = getOccurrence(exp)) < min) {
274 min = o; 234 min = o;
275 rep = exp; 235 rep = exp;
276 } 236 }
277 237
278 if (rep == null) rep = superClasses.iterator().next(); 238 if(rep == null) rep = superClasses.iterator().next();
279 239
280 return Collections.singleton(rep); 240 return Collections.singleton(rep);
281 } 241 }
282 242
283 Random random = new Random(19900114);
284 /* 243 /*
285 * randomly choose a class expression to represent this disjunction 244 * randomly choose a class expression to represent this disjunction
286 */ 245 */
287 @SuppressWarnings("unused") 246 @SuppressWarnings("unused")
288 private Set<OWLClassExpression> getDisjunctionApprox2(Set<OWLClassExpression> superClasses) { 247 private Set<OWLClassExpression> getDisjunctionApprox2(Set<OWLClassExpression> superClasses) {
289 if (superClasses.isEmpty() || superClasses.size() == 1) 248 if(superClasses.isEmpty() || superClasses.size() == 1)
290 return superClasses; 249 return superClasses;
291 250
292 int index = random.nextInt() % superClasses.size(); 251 int index = random.nextInt() % superClasses.size();
293 if (index < 0) index += superClasses.size(); 252 if (index < 0) index += superClasses.size();
294 253
295 int i = 0; 254 int i = 0;
296 for (OWLClassExpression exp: superClasses) 255 for(OWLClassExpression exp : superClasses)
297 if (i++ == index) 256 if (i++ == index)
298 return Collections.singleton(exp); 257 return Collections.singleton(exp);
299 return null; 258 return null;
300 } 259 }
301 260
302 private Map<OWLClassExpression, Integer> subCounter = null;
303 /* 261 /*
304 * choose the one that appears least in the l.h.s. 262 * choose the one that appears least in the l.h.s.
305 */ 263 */
306 @SuppressWarnings("unused") 264 @SuppressWarnings("unused")
307 private Set<OWLClassExpression> getDisjunctionApprox3(Set<OWLClassExpression> superClasses) { 265 private Set<OWLClassExpression> getDisjunctionApprox3(Set<OWLClassExpression> superClasses) {
308 if (superClasses.isEmpty() || superClasses.size() == 1) 266 if(superClasses.isEmpty() || superClasses.size() == 1)
309 return superClasses; 267 return superClasses;
310 268
311 OWLClassExpression rep = null, exp1; 269 OWLClassExpression rep = null, exp1;
312 int occurrence = Integer.MAX_VALUE, o; 270 int occurrence = Integer.MAX_VALUE, o;
313 for (OWLClassExpression exp: superClasses) { 271 for (OWLClassExpression exp: superClasses) {
@@ -318,16 +276,16 @@ public class RLPlusOntology implements KnowledgeBase {
318 if (minCard.getCardinality() == 1) 276 if (minCard.getCardinality() == 1)
319 exp1 = factory.getOWLObjectSomeValuesFrom(minCard.getProperty(), minCard.getFiller()); 277 exp1 = factory.getOWLObjectSomeValuesFrom(minCard.getProperty(), minCard.getFiller());
320 } 278 }
321 279
322 if (!subCounter.containsKey(exp1) || (o = subCounter.get(exp1)) < occurrence) { 280 if (!subCounter.containsKey(exp1) || (o = subCounter.get(exp1)) < occurrence) {
323 rep = exp; 281 rep = exp;
324 occurrence = o; 282 occurrence = o;
325 } 283 }
326 } 284 }
327 285
328 return Collections.singleton(rep); 286 return Collections.singleton(rep);
329 } 287 }
330 288
331 private int getOccurrence(OWLClassExpression exp) { 289 private int getOccurrence(OWLClassExpression exp) {
332 if (!subCounter.containsKey(exp)) 290 if (!subCounter.containsKey(exp))
333 return 0; 291 return 0;
@@ -336,9 +294,9 @@ public class RLPlusOntology implements KnowledgeBase {
336 294
337 @SuppressWarnings("unused") 295 @SuppressWarnings("unused")
338 private Set<OWLClassExpression> getDisjunctionApprox4(Set<OWLClassExpression> superClasses) { 296 private Set<OWLClassExpression> getDisjunctionApprox4(Set<OWLClassExpression> superClasses) {
339 if (superClasses.isEmpty() || superClasses.size() == 1) 297 if(superClasses.isEmpty() || superClasses.size() == 1)
340 return superClasses; 298 return superClasses;
341 299
342 OWLClassExpression rep = null; 300 OWLClassExpression rep = null;
343 int occurrence = Integer.MAX_VALUE, o; 301 int occurrence = Integer.MAX_VALUE, o;
344 for (OWLClassExpression exp: superClasses) { 302 for (OWLClassExpression exp: superClasses) {
@@ -350,10 +308,9 @@ public class RLPlusOntology implements KnowledgeBase {
350 o += getOccurrence(minCard.getFiller()); 308 o += getOccurrence(minCard.getFiller());
351// if (o < o1) o = o1; 309// if (o < o1) o = o1;
352 } 310 }
353 } 311 } else
354 else
355 o = getOccurrence(exp); 312 o = getOccurrence(exp);
356 313
357 if (o < occurrence || o == occurrence && !(rep instanceof OWLClass)) { 314 if (o < occurrence || o == occurrence && !(rep instanceof OWLClass)) {
358 rep = exp; 315 rep = exp;
359 occurrence = o; 316 occurrence = o;
@@ -366,11 +323,11 @@ public class RLPlusOntology implements KnowledgeBase {
366 private boolean simplifyABox() { 323 private boolean simplifyABox() {
367 boolean flag = false; 324 boolean flag = false;
368 Map<OWLClassExpression, OWLClass> complex2atomic= new HashMap<OWLClassExpression, OWLClass>(); 325 Map<OWLClassExpression, OWLClass> complex2atomic= new HashMap<OWLClassExpression, OWLClass>();
369 326
370 OWLDatatype anyURI = factory.getOWLDatatype(IRI.create(Namespace.XSD_NS + "anyURI")); 327 OWLDatatype anyURI = factory.getOWLDatatype(IRI.create(Namespace.XSD_NS + "anyURI"));
371 OWLObjectProperty sameAs = factory.getOWLObjectProperty(IRI.create(Namespace.EQUALITY)); 328 OWLObjectProperty sameAs = factory.getOWLObjectProperty(IRI.create(Namespace.EQUALITY));
372 OWLObjectProperty differentFrom = factory.getOWLObjectProperty(IRI.create(Namespace.INEQUALITY)); 329 OWLObjectProperty differentFrom = factory.getOWLObjectProperty(IRI.create(Namespace.INEQUALITY));
373 330
374 for (OWLOntology imported: inputOntology.getImportsClosure()) 331 for (OWLOntology imported: inputOntology.getImportsClosure())
375 for (OWLAxiom axiom: imported.getAxioms()) { 332 for (OWLAxiom axiom: imported.getAxioms()) {
376 if (axiom instanceof OWLClassAssertionAxiom) { 333 if (axiom instanceof OWLClassAssertionAxiom) {
@@ -380,7 +337,7 @@ public class RLPlusOntology implements KnowledgeBase {
380 OWLClass cls; 337 OWLClass cls;
381 if (clsExp instanceof OWLClass) { 338 if (clsExp instanceof OWLClass) {
382 if (((OWLClass) clsExp).toStringID().startsWith("owl:")) 339 if (((OWLClass) clsExp).toStringID().startsWith("owl:"))
383 manager.addAxiom(tBox, axiom); 340 manager.addAxiom(tBox, axiom);
384 else manager.addAxiom(aBox, axiom); 341 else manager.addAxiom(aBox, axiom);
385 } 342 }
386 else { 343 else {
@@ -389,40 +346,38 @@ public class RLPlusOntology implements KnowledgeBase {
389 manager.addAxiom(tBox, factory.getOWLSubClassOfAxiom(cls, clsExp)); 346 manager.addAxiom(tBox, factory.getOWLSubClassOfAxiom(cls, clsExp));
390 } 347 }
391 manager.addAxiom(aBox, factory.getOWLClassAssertionAxiom(cls, assertion.getIndividual())); 348 manager.addAxiom(aBox, factory.getOWLClassAssertionAxiom(cls, assertion.getIndividual()));
392 } 349 }
393 } 350 }
394 else if (axiom instanceof OWLObjectPropertyAssertionAxiom || axiom instanceof OWLDataPropertyAssertionAxiom || axiom instanceof OWLAnnotationAssertionAxiom) { 351 else if (axiom instanceof OWLObjectPropertyAssertionAxiom || axiom instanceof OWLDataPropertyAssertionAxiom || axiom instanceof OWLAnnotationAssertionAxiom) {
395 if (axiom.getDataPropertiesInSignature().contains(anyURI)) continue; 352 if(axiom.getDataPropertiesInSignature().contains(anyURI)) continue;
396 flag = true; 353 flag = true;
397 manager.addAxiom(aBox, axiom); 354 manager.addAxiom(aBox, axiom);
398 } 355 }
399 else if (axiom instanceof OWLSameIndividualAxiom) { 356 else if (axiom instanceof OWLSameIndividualAxiom) {
400 OWLIndividual firstIndividual = null, previousIndividual = null, lastIndividual = null; 357 OWLIndividual firstIndividual = null, previousIndividual = null, lastIndividual = null;
401 for (OWLIndividual next: ((OWLSameIndividualAxiom) axiom).getIndividuals()) { 358 for (OWLIndividual next: ((OWLSameIndividualAxiom) axiom).getIndividuals()) {
402 if (firstIndividual == null) firstIndividual = previousIndividual = next; 359 if(firstIndividual == null) firstIndividual = previousIndividual = next;
403 else 360 else
404 manager.addAxiom(aBox, factory.getOWLObjectPropertyAssertionAxiom(sameAs, previousIndividual, next)); 361 manager.addAxiom(aBox, factory.getOWLObjectPropertyAssertionAxiom(sameAs, previousIndividual, next));
405 previousIndividual = lastIndividual = next; 362 previousIndividual = lastIndividual = next;
406 } 363 }
407 manager.addAxiom(aBox, factory.getOWLObjectPropertyAssertionAxiom(sameAs, lastIndividual, firstIndividual)); 364 manager.addAxiom(aBox, factory.getOWLObjectPropertyAssertionAxiom(sameAs, lastIndividual, firstIndividual));
408 } 365 }
409 else if (axiom instanceof OWLDifferentIndividualsAxiom) { 366 else if (axiom instanceof OWLDifferentIndividualsAxiom) {
410 int index1 = 0, index2; 367 int index1 = 0, index2;
411 for (OWLIndividual individual1: ((OWLDifferentIndividualsAxiom) axiom).getIndividuals()) { 368 for (OWLIndividual individual1: ((OWLDifferentIndividualsAxiom) axiom).getIndividuals()) {
412 ++index1; 369 ++index1;
413 index2 = 0; 370 index2 = 0;
414 for (OWLIndividual individual2: ((OWLDifferentIndividualsAxiom) axiom).getIndividuals()) { 371 for (OWLIndividual individual2: ((OWLDifferentIndividualsAxiom) axiom).getIndividuals()) {
415 if (index2++ < index1) { 372 if (index2++ < index1) {
416 manager.addAxiom(aBox, factory.getOWLObjectPropertyAssertionAxiom(differentFrom, individual1, individual2)); 373 manager.addAxiom(aBox, factory.getOWLObjectPropertyAssertionAxiom(differentFrom, individual1, individual2));
417 } 374 } else break;
418 else break;
419 } 375 }
420 } 376 }
421 } 377 } else
422 else
423 manager.addAxiom(tBox, axiom); 378 manager.addAxiom(tBox, axiom);
424 } 379 }
425 380
426 return flag; 381 return flag;
427 } 382 }
428 383
@@ -430,15 +385,15 @@ public class RLPlusOntology implements KnowledgeBase {
430 OWL2RLProfile profile = new OWL2RLProfile(); 385 OWL2RLProfile profile = new OWL2RLProfile();
431 OWLProfileReport report = profile.checkOntology(tBox); 386 OWLProfileReport report = profile.checkOntology(tBox);
432 Set<OWLAxiom> rlAxioms = tBox.getAxioms(); 387 Set<OWLAxiom> rlAxioms = tBox.getAxioms();
433 OWLAxiom axiom; 388 OWLAxiom axiom;
434 389
435 for (OWLProfileViolation violation: report.getViolations()) { 390 for (OWLProfileViolation violation: report.getViolations()) {
436 manager.addAxiom(restOntology, axiom = violation.getAxiom()); 391 manager.addAxiom(restOntology, axiom = violation.getAxiom());
437 rlAxioms.remove(axiom); 392 rlAxioms.remove(axiom);
438 } 393 }
439 394
440 for (Iterator<OWLAxiom> iter = rlAxioms.iterator(); iter.hasNext(); ) 395 for (Iterator<OWLAxiom> iter = rlAxioms.iterator(); iter.hasNext(); )
441 addAxiom2output(iter.next(), null); 396 addAxiom2output(iter.next(), null);
442 } 397 }
443 398
444 private void clausify() { 399 private void clausify() {
@@ -453,8 +408,6 @@ public class RLPlusOntology implements KnowledgeBase {
453 if (correspondingAxiom != null) 408 if (correspondingAxiom != null)
454 correspondence.put(axiom, correspondingAxiom); 409 correspondence.put(axiom, correspondingAxiom);
455 } 410 }
456
457 private Map<OWLClass, OWLClass> atomic2negation = new HashMap<OWLClass, OWLClass>();
458 411
459 private OWLClassExpression transform(OWLClassExpression exp, Set<OWLAxiom> addedAxioms) { 412 private OWLClassExpression transform(OWLClassExpression exp, Set<OWLAxiom> addedAxioms) {
460 if (exp instanceof OWLClass) 413 if (exp instanceof OWLClass)
@@ -493,7 +446,7 @@ public class RLPlusOntology implements KnowledgeBase {
493 addedAxioms.add(factory.getOWLObjectPropertyRangeAxiom(r, tExp)); 446 addedAxioms.add(factory.getOWLObjectPropertyRangeAxiom(r, tExp));
494 } 447 }
495 else if (botStrategy != BottomStrategy.TOREMOVE) { 448 else if (botStrategy != BottomStrategy.TOREMOVE) {
496 OWLClass cls = (OWLClass) ((OWLObjectComplementOf) tExp).getComplementNNF(); 449 OWLClass cls = (OWLClass) tExp.getComplementNNF();
497 OWLClass neg; 450 OWLClass neg;
498 if ((neg = atomic2negation.get(cls)) == null) { 451 if ((neg = atomic2negation.get(cls)) == null) {
499 neg = getNewConcept(outputOntology, rlCounter); 452 neg = getNewConcept(outputOntology, rlCounter);
@@ -637,6 +590,6 @@ public class RLPlusOntology implements KnowledgeBase {
637 corrFileName = path; 590 corrFileName = path;
638 } 591 }
639 592
640 private static enum BottomStrategy { TOREMOVE, NULLARY, UNARY } 593 private enum BottomStrategy { TOREMOVE, NULLARY, UNARY }
641} 594}
642 595
diff --git a/src/uk/ac/ox/cs/pagoda/constraints/PredicateDependency.java b/src/uk/ac/ox/cs/pagoda/constraints/PredicateDependency.java
index b201918..70f841f 100644
--- a/src/uk/ac/ox/cs/pagoda/constraints/PredicateDependency.java
+++ b/src/uk/ac/ox/cs/pagoda/constraints/PredicateDependency.java
@@ -1,155 +1,135 @@
1package uk.ac.ox.cs.pagoda.constraints; 1package uk.ac.ox.cs.pagoda.constraints;
2 2
3import java.util.Collection; 3import org.semanticweb.HermiT.model.*;
4import java.util.HashMap; 4import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
5import java.util.HashSet;
6import java.util.LinkedList;
7import java.util.Map;
8import java.util.Queue;
9import java.util.Set;
10
11import org.semanticweb.HermiT.model.AnnotatedEquality;
12import org.semanticweb.HermiT.model.AtLeastConcept;
13import org.semanticweb.HermiT.model.AtLeastDataRange;
14import org.semanticweb.HermiT.model.Atom;
15import org.semanticweb.HermiT.model.AtomicConcept;
16import org.semanticweb.HermiT.model.AtomicNegationConcept;
17import org.semanticweb.HermiT.model.AtomicRole;
18import org.semanticweb.HermiT.model.DLClause;
19import org.semanticweb.HermiT.model.DLPredicate;
20import org.semanticweb.HermiT.model.Equality;
21import org.semanticweb.HermiT.model.Inequality;
22import org.semanticweb.HermiT.model.InverseRole;
23
24import uk.ac.ox.cs.pagoda.rules.OverApproxExist;
25import uk.ac.ox.cs.pagoda.util.Namespace; 5import uk.ac.ox.cs.pagoda.util.Namespace;
26import uk.ac.ox.cs.pagoda.util.Utility; 6import uk.ac.ox.cs.pagoda.util.Utility;
27 7
8import java.util.*;
9
28 10
29public class PredicateDependency extends DependencyGraph<DLPredicate> { 11public class PredicateDependency extends DependencyGraph<DLPredicate> {
30 12
31 Collection<DLClause> m_clauses; 13 private static final DLPredicate equality = AtomicRole.create(Namespace.EQUALITY);
32 Map<PredicatePair, LinkedList<DLClause>> edgeLabels = new HashMap<PredicatePair, LinkedList<DLClause>>(); 14 private static final DLPredicate inequality = AtomicRole.create(Namespace.INEQUALITY);
33 15 Collection<DLClause> m_clauses;
16 Map<PredicatePair, LinkedList<DLClause>> edgeLabels = new HashMap<PredicatePair, LinkedList<DLClause>>();
17 Set<DLPredicate> reachableToBottom = null;
18
34 public PredicateDependency(Collection<DLClause> clauses) { 19 public PredicateDependency(Collection<DLClause> clauses) {
35 m_clauses = clauses; 20 m_clauses = clauses;
36 build(); 21 build();
37 } 22 }
38 23
39 @Override 24 @Override
40 protected void build() { 25 protected void build() {
41 update(m_clauses); 26 update(m_clauses);
42 27
43 addLink(equality, AtomicConcept.NOTHING); 28 addLink(equality, AtomicConcept.NOTHING);
44 addLink(inequality, AtomicConcept.NOTHING); 29 addLink(inequality, AtomicConcept.NOTHING);
45 } 30 }
46 31
47 private void addEdgeLabel(DLPredicate body, DLPredicate head, DLClause clause) { 32 private void addEdgeLabel(DLPredicate body, DLPredicate head, DLClause clause) {
48 PredicatePair key = new PredicatePair(body, head); 33 PredicatePair key = new PredicatePair(body, head);
49 LinkedList<DLClause> value; 34 LinkedList<DLClause> value;
50 if ((value = edgeLabels.get(key)) == null) 35 if ((value = edgeLabels.get(key)) == null)
51 edgeLabels.put(key, value = new LinkedList<DLClause>()); 36 edgeLabels.put(key, value = new LinkedList<DLClause>());
52 value.add(clause); 37 value.add(clause);
53 } 38 }
54 39
55 private void addLinks4Negation(AtomicConcept c, DLClause clause) { 40 private void addLinks4Negation(AtomicConcept c, DLClause clause) {
56 addLink(c, AtomicConcept.NOTHING); 41 addLink(c, AtomicConcept.NOTHING);
57 addEdgeLabel(c, AtomicConcept.NOTHING, clause); 42 addEdgeLabel(c, AtomicConcept.NOTHING, clause);
58 String iri = c.getIRI(); 43 String iri = c.getIRI();
59 addLink(c = AtomicConcept.create(iri.substring(0, iri.length() - 4)), AtomicConcept.NOTHING); 44 addLink(c = AtomicConcept.create(iri.substring(0, iri.length() - 4)), AtomicConcept.NOTHING);
60 addEdgeLabel(c, AtomicConcept.NOTHING, clause); 45 addEdgeLabel(c, AtomicConcept.NOTHING, clause);
61 } 46 }
62 47
63 public Set<DLPredicate> collectPredicate(Atom[] atoms) { 48 public Set<DLPredicate> collectPredicate(Atom[] atoms) {
64 Set<DLPredicate> predicates = new HashSet<DLPredicate>(); 49 Set<DLPredicate> predicates = new HashSet<DLPredicate>();
65 for (Atom atom: atoms) 50 for (Atom atom : atoms)
66 predicates.addAll(getAtomicPredicates(atom.getDLPredicate())); 51 predicates.addAll(getAtomicPredicates(atom.getDLPredicate()));
67 return predicates; 52 return predicates;
68 } 53 }
69
70 private static final DLPredicate equality = AtomicRole.create(Namespace.EQUALITY);
71 private static final DLPredicate inequality = AtomicRole.create(Namespace.INEQUALITY);
72 54
73 private Set<DLPredicate> getAtomicPredicates(DLPredicate predicate) { 55 private Set<DLPredicate> getAtomicPredicates(DLPredicate predicate) {
74 Set<DLPredicate> predicates = new HashSet<DLPredicate>(); 56 Set<DLPredicate> predicates = new HashSet<DLPredicate>();
75 if (predicate instanceof AtLeastConcept) 57 if (predicate instanceof AtLeastConcept)
76 predicates.addAll(getAtomicPredicates((AtLeastConcept) predicate)); 58 predicates.addAll(getAtomicPredicates((AtLeastConcept) predicate));
77 else { 59 else {
78 if ((predicate = getAtomicPredicate(predicate)) != null) 60 if ((predicate = getAtomicPredicate(predicate)) != null)
79 predicates.add(predicate); 61 predicates.add(predicate);
80 } 62 }
81 return predicates; 63 return predicates;
82 } 64 }
83 65
84 private Set<DLPredicate> getAtomicPredicates(AtLeastConcept alc) { 66 private Set<DLPredicate> getAtomicPredicates(AtLeastConcept alc) {
85 Set<DLPredicate> set = new HashSet<DLPredicate>(); 67 Set<DLPredicate> set = new HashSet<DLPredicate>();
86 if (alc.getOnRole() instanceof AtomicRole) 68 if (alc.getOnRole() instanceof AtomicRole)
87 set.add((AtomicRole) alc.getOnRole()); 69 set.add((AtomicRole) alc.getOnRole());
88 else 70 else
89 set.add(((InverseRole) alc.getOnRole()).getInverseOf()); 71 set.add(((InverseRole) alc.getOnRole()).getInverseOf());
90 72
91 if (alc.getToConcept() instanceof AtomicConcept) 73 if (alc.getToConcept() instanceof AtomicConcept)
92 if (alc.getToConcept().equals(AtomicConcept.THING)); 74 if (alc.getToConcept().equals(AtomicConcept.THING)) ;
93 else set.add((AtomicConcept) alc.getToConcept()); 75 else set.add((AtomicConcept) alc.getToConcept());
94 else 76 else
95 set.add(OverApproxExist.getNegationConcept(((AtomicNegationConcept) alc.getToConcept()).getNegatedAtomicConcept())); 77 set.add(OverApproxExist.getNegationConcept(((AtomicNegationConcept) alc.getToConcept()).getNegatedAtomicConcept()));
96 return set; 78 return set;
97 } 79 }
98 80
99 private DLPredicate getAtomicPredicate(DLPredicate p) { 81 private DLPredicate getAtomicPredicate(DLPredicate p) {
100 if (p instanceof Equality || p instanceof AnnotatedEquality) 82 if (p instanceof Equality || p instanceof AnnotatedEquality)
101 return equality; 83 return equality;
102 if (p instanceof Inequality) 84 if (p instanceof Inequality)
103 return inequality; 85 return inequality;
104 if (p instanceof AtomicConcept) 86 if (p instanceof AtomicConcept)
105 if (p.equals(AtomicConcept.THING)) 87 if (p.equals(AtomicConcept.THING))
106 return null; 88 return null;
107 else return p; 89 else return p;
108 if (p instanceof AtomicRole) 90 if (p instanceof AtomicRole)
109 return p; 91 return p;
110 if (p instanceof AtLeastDataRange) { 92 if (p instanceof AtLeastDataRange) {
111 AtLeastDataRange aldr = (AtLeastDataRange) p; 93 AtLeastDataRange aldr = (AtLeastDataRange) p;
112 if (aldr.getOnRole() instanceof AtomicRole) 94 if (aldr.getOnRole() instanceof AtomicRole)
113 return (AtomicRole) aldr.getOnRole(); 95 return (AtomicRole) aldr.getOnRole();
114 else 96 else
115 return ((InverseRole) aldr.getOnRole()).getInverseOf(); 97 return ((InverseRole) aldr.getOnRole()).getInverseOf();
116 } 98 }
117 Utility.logDebug("Unknown DLPredicate in PredicateDependency: " + p); 99 Utility.logDebug("Unknown DLPredicate in PredicateDependency: " + p);
118 return null; 100 return null;
119 } 101 }
120 102
121 public Set<DLClause> pathTo(DLPredicate p) { 103 public Set<DLClause> pathTo(DLPredicate p) {
122 Set<DLClause> rules = new HashSet<DLClause>(); 104 Set<DLClause> rules = new HashSet<DLClause>();
123 Set<DLPredicate> visited = new HashSet<DLPredicate>(); 105 Set<DLPredicate> visited = new HashSet<DLPredicate>();
124 106
125 Queue<DLPredicate> queue = new LinkedList<DLPredicate>(); 107 Queue<DLPredicate> queue = new LinkedList<DLPredicate>();
126 queue.add(p); 108 queue.add(p);
127 visited.add(p); 109 visited.add(p);
128 110
129 Set<DLPredicate> edge; 111 Set<DLPredicate> edge;
130 Collection<DLClause> clauses; 112 Collection<DLClause> clauses;
131 113
132 while (!queue.isEmpty()) { 114 while (!queue.isEmpty()) {
133 if ((edge = reverseEdges.get(p = queue.poll())) != null) { 115 if ((edge = reverseEdges.get(p = queue.poll())) != null) {
134 for (DLPredicate pred: edge) { 116 for (DLPredicate pred: edge) {
135 if (!visited.contains(pred)) { 117 if (!visited.contains(pred)) {
136 queue.add(pred); 118 queue.add(pred);
137 visited.add(pred); 119 visited.add(pred);
138 } 120 }
139 clauses = edgeLabelsBetween(pred, p); 121 clauses = edgeLabelsBetween(pred, p);
140 if (clauses != null) rules.addAll(clauses); 122 if (clauses != null) rules.addAll(clauses);
141 } 123 }
142 } 124 }
143 } 125 }
144 return rules; 126 return rules;
145 } 127 }
146 128
147 private LinkedList<DLClause> edgeLabelsBetween(DLPredicate p, DLPredicate q) { 129 private LinkedList<DLClause> edgeLabelsBetween(DLPredicate p, DLPredicate q) {
148 PredicatePair pair = new PredicatePair(p, q); 130 PredicatePair pair = new PredicatePair(p, q);
149 return edgeLabels.get(pair); 131 return edgeLabels.get(pair);
150 } 132 }
151
152 Set<DLPredicate> reachableToBottom = null;
153 133
154 public Set<DLClause> pathToBottom(DLPredicate p) { 134 public Set<DLClause> pathToBottom(DLPredicate p) {
155 if (reachableToBottom == null) { 135 if (reachableToBottom == null) {
diff --git a/src/uk/ac/ox/cs/pagoda/endomorph/Clique.java b/src/uk/ac/ox/cs/pagoda/endomorph/Clique.java
index 9daea7e..9b0d88e 100644
--- a/src/uk/ac/ox/cs/pagoda/endomorph/Clique.java
+++ b/src/uk/ac/ox/cs/pagoda/endomorph/Clique.java
@@ -1,11 +1,11 @@
1package uk.ac.ox.cs.pagoda.endomorph; 1package uk.ac.ox.cs.pagoda.endomorph;
2 2
3import uk.ac.ox.cs.pagoda.summary.NodeTuple;
4
3import java.util.Collection; 5import java.util.Collection;
4import java.util.HashSet; 6import java.util.HashSet;
5import java.util.Set; 7import java.util.Set;
6 8
7import uk.ac.ox.cs.pagoda.summary.NodeTuple;
8
9public class Clique { 9public class Clique {
10 NodeTuple representative; 10 NodeTuple representative;
11 Set<NodeTuple> nodeTuples = null; 11 Set<NodeTuple> nodeTuples = null;
diff --git a/src/uk/ac/ox/cs/pagoda/endomorph/plan/PlainPlan.java b/src/uk/ac/ox/cs/pagoda/endomorph/plan/PlainPlan.java
index 6931ccc..5e1a700 100644
--- a/src/uk/ac/ox/cs/pagoda/endomorph/plan/PlainPlan.java
+++ b/src/uk/ac/ox/cs/pagoda/endomorph/plan/PlainPlan.java
@@ -1,12 +1,12 @@
1package uk.ac.ox.cs.pagoda.endomorph.plan; 1package uk.ac.ox.cs.pagoda.endomorph.plan;
2 2
3import java.util.Set;
4
5import uk.ac.ox.cs.pagoda.endomorph.Clique; 3import uk.ac.ox.cs.pagoda.endomorph.Clique;
6import uk.ac.ox.cs.pagoda.reasoner.full.Checker; 4import uk.ac.ox.cs.pagoda.reasoner.full.Checker;
7import uk.ac.ox.cs.pagoda.summary.NodeTuple; 5import uk.ac.ox.cs.pagoda.summary.NodeTuple;
8import uk.ac.ox.cs.pagoda.util.Utility; 6import uk.ac.ox.cs.pagoda.util.Utility;
9 7
8import java.util.Set;
9
10public class PlainPlan implements CheckPlan { 10public class PlainPlan implements CheckPlan {
11 11
12 Checker checker; 12 Checker checker;
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication.java b/src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication.java
index 22f1d1d..c75083b 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication.java
@@ -1,14 +1,14 @@
1package uk.ac.ox.cs.pagoda.multistage; 1package uk.ac.ox.cs.pagoda.multistage;
2 2
3import java.util.Collection;
4import java.util.LinkedList;
5
6import org.semanticweb.HermiT.model.DLClause; 3import org.semanticweb.HermiT.model.DLClause;
7import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; 4import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
8import uk.ac.ox.cs.pagoda.rules.OverApproxDisj;
9import uk.ac.ox.cs.pagoda.rules.Program; 5import uk.ac.ox.cs.pagoda.rules.Program;
6import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxDisj;
10import uk.ac.ox.cs.pagoda.util.Timer; 7import uk.ac.ox.cs.pagoda.util.Timer;
11 8
9import java.util.Collection;
10import java.util.LinkedList;
11
12public class FoldedApplication extends MultiStageUpperProgram { 12public class FoldedApplication extends MultiStageUpperProgram {
13 13
14 public FoldedApplication(Program program, BottomStrategy upperBottom) { 14 public FoldedApplication(Program program, BottomStrategy upperBottom) {
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication2.java b/src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication2.java
deleted file mode 100644
index 8212733..0000000
--- a/src/uk/ac/ox/cs/pagoda/multistage/FoldedApplication2.java
+++ /dev/null
@@ -1,51 +0,0 @@
1package uk.ac.ox.cs.pagoda.multistage;
2
3import java.util.Collection;
4
5import org.semanticweb.HermiT.model.AtLeastConcept;
6import org.semanticweb.HermiT.model.Atom;
7import org.semanticweb.HermiT.model.DLClause;
8
9import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
10import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
11import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
12import uk.ac.ox.cs.pagoda.rules.Program;
13
14public class FoldedApplication2 extends TwoStageApplication {
15
16 public FoldedApplication2(TwoStageQueryEngine engine, DatalogProgram program, GapByStore4ID gap) {
17 super(engine, program, gap);
18 }
19
20 @Override
21 protected void addAuxiliaryRules() {
22 Collection<DLClause> overClauses;
23 DLClause disjunct;
24 Atom[] bodyAtoms;
25 int i;
26 for (DLClause constraint: constraints)
27 for (Atom headAtom: constraint.getHeadAtoms())
28 if (headAtom.getDLPredicate() instanceof AtLeastConcept) {
29 disjunct = DLClause.create(new Atom[] {headAtom}, constraint.getBodyAtoms());
30 overClauses = overExist.convert(disjunct, getOriginalClause(constraint));
31 bodyAtoms = new Atom[constraint.getBodyLength() + 1];
32 bodyAtoms[0] = getNAFAtom(headAtom);
33 i = 0;
34 for (Atom bodyAtom: constraint.getBodyAtoms())
35 bodyAtoms[++i] = bodyAtom;
36 for (DLClause overClause: overClauses)
37 if (DLClauseHelper.hasSubsetBodyAtoms(disjunct, constraint))
38 addDatalogRule(DLClause.create(new Atom[] {overClause.getHeadAtom(0)}, bodyAtoms));
39 }
40 else
41 addDatalogRule(DLClause.create(new Atom[] {headAtom}, constraint.getBodyAtoms()));
42 }
43
44 @Override
45 protected Collection<DLClause> getInitialClauses(Program program) {
46 return program.getClauses();
47 }
48
49
50
51}
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/IndividualCollector.java b/src/uk/ac/ox/cs/pagoda/multistage/IndividualCollector.java
deleted file mode 100644
index 3d78f0a..0000000
--- a/src/uk/ac/ox/cs/pagoda/multistage/IndividualCollector.java
+++ /dev/null
@@ -1,70 +0,0 @@
1package uk.ac.ox.cs.pagoda.multistage;
2
3import java.util.Collection;
4import java.util.HashSet;
5import java.util.Set;
6
7import org.openrdf.model.Resource;
8import org.openrdf.model.Statement;
9import org.openrdf.model.Value;
10import org.openrdf.model.impl.URIImpl;
11import org.openrdf.rio.RDFHandler;
12import org.openrdf.rio.RDFHandlerException;
13
14import uk.ac.ox.cs.JRDFox.model.Individual;
15import uk.ac.ox.cs.pagoda.rules.OverApproxExist;
16import uk.ac.ox.cs.pagoda.util.Namespace;
17
18public class IndividualCollector implements RDFHandler {
19
20 boolean addedSkolemised = false;
21 Set<Individual> individuals = new HashSet<Individual>();
22
23 @Override
24 public void startRDF() throws RDFHandlerException {
25 // TODO Auto-generated method stub
26
27 }
28
29 @Override
30 public void endRDF() throws RDFHandlerException {
31 // TODO Auto-generated method stub
32
33 }
34
35 @Override
36 public void handleNamespace(String prefix, String uri)
37 throws RDFHandlerException {
38 // TODO Auto-generated method stub
39
40 }
41
42 @Override
43 public void handleStatement(Statement st) throws RDFHandlerException {
44 Resource sub = st.getSubject();
45 if (sub instanceof URIImpl)
46 individuals.add(Individual.create(((URIImpl) sub).toString()));
47 if (!st.getPredicate().toString().equals(Namespace.RDF_TYPE)) {
48 Value obj = st.getObject();
49 if (obj instanceof URIImpl)
50 individuals.add(Individual.create(((URIImpl) sub).toString()));
51 }
52 }
53
54 @Override
55 public void handleComment(String comment) throws RDFHandlerException {
56 // TODO Auto-generated method stub
57
58 }
59
60 public Collection<Individual> getAllIndividuals() {
61 if (!addedSkolemised) {
62 int number = OverApproxExist.getNumberOfSkolemisedIndividual();
63 for (int i = 0; i < number; ++i)
64 individuals.add(Individual.create(OverApproxExist.skolemisedIndividualPrefix + i));
65 addedSkolemised = true;
66 }
67 return individuals;
68 }
69
70}
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java b/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java
new file mode 100644
index 0000000..b548d39
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/multistage/LimitedSkolemisationApplication.java
@@ -0,0 +1,16 @@
1package uk.ac.ox.cs.pagoda.multistage;
2
3
4import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
5import uk.ac.ox.cs.pagoda.rules.Program;
6import uk.ac.ox.cs.pagoda.rules.approximators.LimitedSkolemisationApproximator;
7
8public class LimitedSkolemisationApplication extends RestrictedApplication {
9
10 public static final int MAX_DEPTH = 1;
11
12 public LimitedSkolemisationApplication(Program program, BottomStrategy upperBottom) {
13 super(program, upperBottom);
14 m_approxExist = new LimitedSkolemisationApproximator(MAX_DEPTH);
15 }
16}
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java
index b77c264..50996d0 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java
@@ -1,16 +1,18 @@
1package uk.ac.ox.cs.pagoda.multistage; 1package uk.ac.ox.cs.pagoda.multistage;
2 2
3import java.util.Collection; 3import uk.ac.ox.cs.JRDFox.JRDFStoreException;
4
5import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; 4import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
6import uk.ac.ox.cs.pagoda.multistage.treatement.*; 5import uk.ac.ox.cs.pagoda.multistage.treatement.Pick4NegativeConceptNaive;
6import uk.ac.ox.cs.pagoda.multistage.treatement.Pick4NegativeConceptQuerySpecific;
7import uk.ac.ox.cs.pagoda.multistage.treatement.Treatment;
7import uk.ac.ox.cs.pagoda.query.GapByStore4ID; 8import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
8import uk.ac.ox.cs.pagoda.query.QueryRecord; 9import uk.ac.ox.cs.pagoda.query.QueryRecord;
9import uk.ac.ox.cs.pagoda.rules.DatalogProgram; 10import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
10import uk.ac.ox.cs.pagoda.rules.Program; 11import uk.ac.ox.cs.pagoda.rules.Program;
11import uk.ac.ox.cs.pagoda.util.Timer; 12import uk.ac.ox.cs.pagoda.util.Timer;
12import uk.ac.ox.cs.pagoda.util.Utility; 13import uk.ac.ox.cs.pagoda.util.Utility;
13import uk.ac.ox.cs.JRDFox.JRDFStoreException; 14
15import java.util.Collection;
14 16
15public class MultiStageQueryEngine extends StageQueryEngine { 17public class MultiStageQueryEngine extends StageQueryEngine {
16 18
@@ -42,7 +44,7 @@ public class MultiStageQueryEngine extends StageQueryEngine {
42 RestrictedApplication program = new RestrictedApplication(generalProgram, dProgram.getUpperBottomStrategy()); 44 RestrictedApplication program = new RestrictedApplication(generalProgram, dProgram.getUpperBottomStrategy());
43 Treatment treatment = new Pick4NegativeConceptNaive(this, program); 45 Treatment treatment = new Pick4NegativeConceptNaive(this, program);
44 int ret = materialise(program, treatment, gap); 46 int ret = materialise(program, treatment, gap);
45 treatment.dispose(); 47 treatment.dispose(); // does nothing
46 return ret; 48 return ret;
47 } 49 }
48 50
@@ -53,6 +55,18 @@ public class MultiStageQueryEngine extends StageQueryEngine {
53 treatment.dispose(); 55 treatment.dispose();
54 return ret; 56 return ret;
55 } 57 }
58
59 /**
60 * delta-chase
61 */
62 @Override
63 public int materialiseSkolemly(DatalogProgram dProgram, GapByStore4ID gap) {
64 materialise("lower program", dProgram.getLower().toString());
65 Program generalProgram = dProgram.getGeneral();
66 LimitedSkolemisationApplication program = new LimitedSkolemisationApplication(generalProgram, dProgram.getUpperBottomStrategy());
67 Treatment treatment = new Pick4NegativeConceptNaive(this, program);
68 return materialise(program, treatment, gap);
69 }
56 70
57 private int materialise(MultiStageUpperProgram program, Treatment treatment, GapByStore4ID gap) { 71 private int materialise(MultiStageUpperProgram program, Treatment treatment, GapByStore4ID gap) {
58 if (gap != null) 72 if (gap != null)
@@ -67,11 +81,11 @@ public class MultiStageQueryEngine extends StageQueryEngine {
67 // TODO to be removed ... 81 // TODO to be removed ...
68// if (gap == null) 82// if (gap == null)
69// program.save("output/multi.dlog"); 83// program.save("output/multi.dlog");
70 84
71 Collection<Violation> violations = null; 85 Collection<Violation> violations;
72 int iteration = 0; 86 int iteration = 0;
73 Timer subTimer = new Timer(); 87 Timer subTimer = new Timer();
74 boolean incrementally = false; 88 boolean incrementally;
75 try { 89 try {
76 while (true) { 90 while (true) {
77 long oldTripleCount = store.getTriplesCount(); 91 long oldTripleCount = store.getTriplesCount();
@@ -104,7 +118,7 @@ public class MultiStageQueryEngine extends StageQueryEngine {
104 118
105 if (!isValid()) { 119 if (!isValid()) {
106 if (iteration == 1) { 120 if (iteration == 1) {
107 Utility.logInfo("The ontology is incosistent."); 121 Utility.logInfo("The ontology is inconsistent.");
108 return -1; 122 return -1;
109 } 123 }
110 Utility.logInfo(name + " store FAILED for multi-stage materialisation in " + t.duration() + " seconds."); 124 Utility.logInfo(name + " store FAILED for multi-stage materialisation in " + t.duration() + " seconds.");
@@ -119,8 +133,8 @@ public class MultiStageQueryEngine extends StageQueryEngine {
119 subTimer.reset(); 133 subTimer.reset();
120 if ((violations = program.isIntegrated(this, incrementally)) == null || violations.size() == 0) { 134 if ((violations = program.isIntegrated(this, incrementally)) == null || violations.size() == 0) {
121 store.clearRulesAndMakeFactsExplicit(); 135 store.clearRulesAndMakeFactsExplicit();
122 Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - tripleCountBeforeMat) + " new)"); 136 Utility.logInfo(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - tripleCountBeforeMat) + " new)");
123 Utility.logInfo(name + " store is DONE for multi-stage materialising in " + t.duration() + " seconds."); 137 Utility.logInfo(name + " store is DONE for multi-stage materialising in " + t.duration() + " seconds.");
124 return isValid() ? 1 : 0; 138 return isValid() ? 1 : 0;
125 } 139 }
126 Utility.logDebug("Time to detect violations: " + subTimer.duration()); 140 Utility.logDebug("Time to detect violations: " + subTimer.duration());
@@ -129,6 +143,10 @@ public class MultiStageQueryEngine extends StageQueryEngine {
129 subTimer.reset(); 143 subTimer.reset();
130 oldTripleCount = store.getTriplesCount(); 144 oldTripleCount = store.getTriplesCount();
131 for (Violation v : violations) { 145 for (Violation v : violations) {
146
147 Utility.logDebug("Dealing with violation: " + v.constraint);
148 Utility.logDebug("Number of violation tuples: " + v.size());
149
132 Timer localTimer = new Timer(); 150 Timer localTimer = new Timer();
133 int number = v.size(); 151 int number = v.size();
134 long vOldCounter = store.getTriplesCount(); 152 long vOldCounter = store.getTriplesCount();
@@ -137,8 +155,8 @@ public class MultiStageQueryEngine extends StageQueryEngine {
137 Utility.logInfo(name + " store FAILED for multi-stage materialisation in " + t.duration() + " seconds."); 155 Utility.logInfo(name + " store FAILED for multi-stage materialisation in " + t.duration() + " seconds.");
138 return 0; 156 return 0;
139 } 157 }
140 Utility.logDebug("Time to make the constraint being satisfied: " + localTimer.duration() + " " + number + " tuples for " + v.constraint); 158 Utility.logDebug("Time to make the constraint being satisfied: " + localTimer.duration());
141 Utility.logDebug("tuple number: " + v.size() + " before: " + vOldCounter + " after: " + store.getTriplesCount() + " (" + (store.getTriplesCount() - vOldCounter) + " new) ." ); 159 Utility.logDebug("Triples in the store: before=" + vOldCounter + ", after=" + store.getTriplesCount() + ", new=" + (store.getTriplesCount() - vOldCounter));
142 } 160 }
143 Utility.logDebug(name + " store after adding facts for violations: " + (tripleCount = store.getTriplesCount()) + " (" + (tripleCount - oldTripleCount) + " new)"); 161 Utility.logDebug(name + " store after adding facts for violations: " + (tripleCount = store.getTriplesCount()) + " (" + (tripleCount - oldTripleCount) + " new)");
144 Utility.logDebug("Time to add triples for violations: " + subTimer.duration()); 162 Utility.logDebug("Time to add triples for violations: " + subTimer.duration());
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageUpperProgram.java b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageUpperProgram.java
index 4239ccc..1664c99 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageUpperProgram.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageUpperProgram.java
@@ -1,254 +1,284 @@
1package uk.ac.ox.cs.pagoda.multistage; 1package uk.ac.ox.cs.pagoda.multistage;
2 2
3import java.io.BufferedWriter; 3import org.semanticweb.HermiT.model.*;
4import java.io.FileNotFoundException; 4import uk.ac.ox.cs.JRDFox.JRDFStoreException;
5import java.io.FileOutputStream; 5import uk.ac.ox.cs.JRDFox.store.TupleIterator;
6import java.io.IOException;
7import java.io.OutputStreamWriter;
8import java.util.Collection;
9import java.util.HashMap;
10import java.util.HashSet;
11import java.util.Iterator;
12import java.util.LinkedList;
13import java.util.Map;
14import java.util.Set;
15
16import org.semanticweb.HermiT.model.AnnotatedEquality;
17import org.semanticweb.HermiT.model.AtLeast;
18import org.semanticweb.HermiT.model.AtLeastConcept;
19import org.semanticweb.HermiT.model.AtLeastDataRange;
20import org.semanticweb.HermiT.model.Atom;
21import org.semanticweb.HermiT.model.AtomicConcept;
22import org.semanticweb.HermiT.model.AtomicNegationConcept;
23import org.semanticweb.HermiT.model.AtomicRole;
24import org.semanticweb.HermiT.model.DLClause;
25import org.semanticweb.HermiT.model.DLPredicate;
26import org.semanticweb.HermiT.model.Equality;
27import org.semanticweb.HermiT.model.Inequality;
28import org.semanticweb.HermiT.model.InverseRole;
29import org.semanticweb.HermiT.model.Variable;
30
31import uk.ac.ox.cs.pagoda.MyPrefixes; 6import uk.ac.ox.cs.pagoda.MyPrefixes;
32import uk.ac.ox.cs.pagoda.constraints.*; 7import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
33import uk.ac.ox.cs.pagoda.hermit.RuleHelper; 8import uk.ac.ox.cs.pagoda.hermit.RuleHelper;
34import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager; 9import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager;
35import uk.ac.ox.cs.pagoda.rules.Approximator; 10import uk.ac.ox.cs.pagoda.rules.ExistConstantApproximator;
36import uk.ac.ox.cs.pagoda.rules.OverApproxExist;
37import uk.ac.ox.cs.pagoda.rules.Program; 11import uk.ac.ox.cs.pagoda.rules.Program;
12import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
13import uk.ac.ox.cs.pagoda.rules.approximators.TupleDependentApproximator;
38import uk.ac.ox.cs.pagoda.util.Namespace; 14import uk.ac.ox.cs.pagoda.util.Namespace;
39import uk.ac.ox.cs.pagoda.util.SparqlHelper; 15import uk.ac.ox.cs.pagoda.util.SparqlHelper;
40import uk.ac.ox.cs.pagoda.util.Timer; 16import uk.ac.ox.cs.pagoda.util.Timer;
41import uk.ac.ox.cs.pagoda.util.Utility; 17import uk.ac.ox.cs.pagoda.util.Utility;
42import uk.ac.ox.cs.JRDFox.JRDFStoreException; 18import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
43import uk.ac.ox.cs.JRDFox.store.TupleIterator;
44 19
45public abstract class MultiStageUpperProgram { 20import java.io.*;
46 21import java.util.*;
47 Set<DLClause> constraints = new HashSet<DLClause>();
48 Set<DLClause> rules = new HashSet<DLClause>();
49 Collection<DLClause> clauses;
50 22
51 BottomStrategy m_bottom = null; 23public abstract class MultiStageUpperProgram {
52 Approximator m_approxExist = new OverApproxExist();
53 24
54 protected static final Variable X = Variable.create("X"); 25 protected static final Variable X = Variable.create("X");
55 26 Set<DLClause> constraints = new HashSet<DLClause>();
27 Set<DLClause> rules = new HashSet<DLClause>();
28 Collection<DLClause> clauses;
29 BottomStrategy m_bottom = null;
30 TupleDependentApproximator m_approxExist = new ExistConstantApproximator();
31 Map<DLClause, DLClause> map = new HashMap<DLClause, DLClause>();
32 Set<DLPredicate> updatedPredicates = new HashSet<DLPredicate>();
56 private MyPrefixes prefixes = MyPrefixes.PAGOdAPrefixes; 33 private MyPrefixes prefixes = MyPrefixes.PAGOdAPrefixes;
57 Map<DLClause, DLClause> map = new HashMap<DLClause, DLClause>(); 34 private StringBuilder datalogRuleText = new StringBuilder();
35 private Timer t = new Timer();
58 36
59 public MultiStageUpperProgram(Program program, BottomStrategy upperBottom) { 37 public MultiStageUpperProgram(Program program, BottomStrategy upperBottom) {
60 m_bottom = upperBottom; 38 m_bottom = upperBottom;
61 clauses = getInitialClauses(program); 39 clauses = getInitialClauses(program);
62 Collection<DLClause> introducedConstraints = new LinkedList<DLClause>(); 40 Collection<DLClause> introducedConstraints = new LinkedList<DLClause>();
63 LinkedList<Atom> newHeadAtoms = new LinkedList<Atom>(); 41 LinkedList<Atom> newHeadAtoms = new LinkedList<Atom>();
64 for (DLClause clause: m_bottom.process(clauses)) { 42 for (DLClause clause: m_bottom.process(clauses)) {
65 if (m_bottom.isBottomRule(clause) || clause.getHeadLength() == 1 && !(clause.getHeadAtom(0).getDLPredicate() instanceof AtLeast)) 43 if (m_bottom.isBottomRule(clause) || clause.getHeadLength() == 1 && !(clause.getHeadAtom(0).getDLPredicate() instanceof AtLeast))
66 addDatalogRule(clause); 44 addDatalogRule(clause);
67 else { 45 else {
68 newHeadAtoms.clear(); 46 newHeadAtoms.clear();
69 boolean changed = false; 47 boolean changed = false;
70 for (Atom atom: clause.getHeadAtoms()) { 48 for (Atom atom: clause.getHeadAtoms()) {
71 if (atom.getDLPredicate() instanceof AtLeastConcept) { 49 if (atom.getDLPredicate() instanceof AtLeastConcept) {
72 AtLeastConcept atLeast = (AtLeastConcept) atom.getDLPredicate(); 50 AtLeastConcept atLeast = (AtLeastConcept) atom.getDLPredicate();
73 if (atLeast.getToConcept() instanceof AtomicNegationConcept) { 51 if (atLeast.getToConcept() instanceof AtomicNegationConcept) {
74 AtomicConcept positive = ((AtomicNegationConcept) atLeast.getToConcept()).getNegatedAtomicConcept(); 52 AtomicConcept positive = ((AtomicNegationConcept) atLeast.getToConcept()).getNegatedAtomicConcept();
75 AtomicConcept negative = OverApproxExist.getNegationConcept(positive); 53 AtomicConcept negative = OverApproxExist.getNegationConcept(positive);
76 Atom atom1 = Atom.create(positive, X); 54 Atom atom1 = Atom.create(positive, X);
77 Atom atom2 = Atom.create(negative, X); 55 Atom atom2 = Atom.create(negative, X);
78 introducedConstraints.add(DLClause.create(new Atom[0], new Atom[] {atom1, atom2})); 56 introducedConstraints.add(DLClause.create(new Atom[0], new Atom[]{atom1, atom2}));
79 newHeadAtoms.add( 57 newHeadAtoms.add(
80 Atom.create( 58 Atom.create(
81 AtLeastConcept.create(atLeast.getArity(), atLeast.getOnRole(), negative), 59 AtLeastConcept.create(atLeast.getArity(), atLeast.getOnRole(), negative),
82 atom.getArgument(0))); 60 atom.getArgument(0)));
83 changed = true; 61 changed = true;
84 continue; 62 continue;
85 } 63 }
86 } 64 }
87 else if (atom.getDLPredicate() instanceof AtLeastDataRange) 65 else if (atom.getDLPredicate() instanceof AtLeastDataRange)
88 changed = true; 66 changed = true;
89 else 67 else
90 newHeadAtoms.add(atom); 68 newHeadAtoms.add(atom);
91 69
92 } 70 }
93 if (!changed) constraints.add(clause); 71 if (!changed) constraints.add(clause);
94 else if (!newHeadAtoms.isEmpty()) { 72 else if (!newHeadAtoms.isEmpty()) {
95 DLClause newClause = DLClause.create(newHeadAtoms.toArray(new Atom[0]), clause.getBodyAtoms()); 73 DLClause newClause = DLClause.create(newHeadAtoms.toArray(new Atom[0]), clause.getBodyAtoms());
96 map.put(newClause, getOriginalClause(clause)); 74 map.put(newClause, getOriginalClause(clause));
97 constraints.add(newClause); 75 constraints.add(newClause);
98 } 76 }
99 } 77 }
100 } 78 }
101 79
102 for (DLClause clause: m_bottom.process(introducedConstraints)) 80 for (DLClause clause: m_bottom.process(introducedConstraints))
103 addDatalogRule(clause); 81 addDatalogRule(clause);
104 } 82 }
105 83
106 protected void addDatalogRule(DLClause clause) {
107 rules.add(clause);
108 datalogRuleText.append(RuleHelper.getText(clause)).append(Utility.LINE_SEPARATOR);
109 }
110
111 public static Atom getNegativeAtom(Atom atom) { 84 public static Atom getNegativeAtom(Atom atom) {
112 if (atom.getDLPredicate() instanceof AtomicConcept) 85 if (atom.getDLPredicate() instanceof AtomicConcept)
113 return Atom.create(OverApproxExist.getNegationConcept(atom.getDLPredicate()), atom.getArgument(0)); 86 return Atom.create(OverApproxExist.getNegationConcept(atom.getDLPredicate()), atom.getArgument(0));
114 87
115 if (atom.getDLPredicate() instanceof Inequality) 88 if (atom.getDLPredicate() instanceof Inequality)
116 return Atom.create(Equality.INSTANCE, atom.getArgument(0), atom.getArgument(1)); 89 return Atom.create(Equality.INSTANCE, atom.getArgument(0), atom.getArgument(1));
117 90
118 if (atom.getDLPredicate() instanceof Equality || atom.getDLPredicate() instanceof AnnotatedEquality) 91 if (atom.getDLPredicate() instanceof Equality || atom.getDLPredicate() instanceof AnnotatedEquality)
119 return Atom.create(Inequality.INSTANCE, atom.getArgument(0), atom.getArgument(1)); 92 return Atom.create(Inequality.INSTANCE, atom.getArgument(0), atom.getArgument(1));
120 93
121 return null; 94 return null;
122 } 95 }
123 96
124 private StringBuilder datalogRuleText = new StringBuilder(); 97 public static AnswerTupleID project(AnswerTupleID tuple, String[] vars, String[] subVars) {
98 int subArity = 0;
99 for (int i = 0; i < subVars.length; ++i)
100 if (subVars[i] != null) ++subArity;
101
102 if (tuple.getArity() == subArity)
103 return tuple;
104
105 AnswerTupleID newTuple = new AnswerTupleID(subArity);
106 for (int i = 0, j = 0; i < vars.length; ++i)
107 if (subVars[i] != null && !subVars[i].isEmpty()) {
108 newTuple.setTerm(j++, tuple.getTerm(i));
109 }
110
111 return newTuple;
112 }
113
114 public static String[] getVarSubset(String[] vars, Atom... atoms) {
115 String[] newVars = new String[vars.length];
116 Set<Variable> allVars = new HashSet<Variable>();
117 int arity;
118 for (Atom atom : atoms) {
119 arity = atom.getArity();
120 if (atom.getDLPredicate() instanceof AnnotatedEquality) arity = 2;
121 for (int j = 0; j < arity; ++j)
122 if (atom.getArgument(j) instanceof Variable) {
123 allVars.add(atom.getArgumentVariable(j));
124 }
125 }
126
127 for (int i = 0; i < vars.length; ++i) {
128 newVars[i] = allVars.contains(Variable.create(vars[i])) ? vars[i] : null;
129 }
130
131 return newVars;
132 }
133
134 public static String[] getCommonVars(DLClause clause) {
135 Set<Variable> headVars = getVariables(clause.getHeadAtoms());
136 Set<Variable> bodyVars = getVariables(clause.getBodyAtoms());
137
138 Collection<String> common = new LinkedList<String>();
139 for (Variable v : headVars)
140 if (bodyVars.contains(v)) common.add(v.getName());
141
142 return common.toArray(new String[0]);
143 }
144
145 public static Set<Variable> getVariables(Atom[] atoms) {
146 Set<Variable> v = new HashSet<Variable>();
147 for (Atom atom : atoms) atom.getVariables(v);
148 return v;
149 }
150
151 protected void addDatalogRule(DLClause clause) {
152 rules.add(clause);
153 datalogRuleText.append(RuleHelper.getText(clause)).append(Utility.LINE_SEPARATOR);
154 }
125 155
126 public String getDatalogRuleText() { 156 public String getDatalogRuleText() {
127 StringBuilder program = new StringBuilder(); 157 StringBuilder program = new StringBuilder();
128 program.append(prefixes.prefixesText()); 158 program.append(prefixes.prefixesText());
129 program.append(datalogRuleText.toString()); 159 program.append(datalogRuleText.toString());
130 return program.toString(); 160 return program.toString();
131 } 161 }
132 162
133 protected void addDerivedPredicate(MultiStageQueryEngine engine) { 163 protected void addDerivedPredicate(MultiStageQueryEngine engine) {
134 TupleIterator derivedTuples = null; 164 TupleIterator derivedTuples = null;
135 try { 165 try {
136 derivedTuples = engine.internal_evaluateAgainstIDBs("select distinct ?z where { ?x <" + Namespace.RDF_TYPE + "> ?z . }"); 166 derivedTuples = engine.internal_evaluateAgainstIDBs("select distinct ?z where { ?x <" + Namespace.RDF_TYPE + "> ?z . }");
137 for (long multi = derivedTuples.open(); multi != 0; multi = derivedTuples.getNext()) { 167 for (long multi = derivedTuples.open(); multi != 0; multi = derivedTuples.getNext()) {
138 String p = prefixes.expandIRI(RDFoxTripleManager.getQuotedTerm(derivedTuples.getResource(0))); 168 String p = prefixes.expandIRI(RDFoxTripleManager.getQuotedTerm(derivedTuples.getResource(0)));
139 updatedPredicates.add(AtomicConcept.create(p)); 169 updatedPredicates.add(AtomicConcept.create(p));
140 } 170 }
141 } catch (JRDFStoreException e) { 171 } catch (JRDFStoreException e) {
142 e.printStackTrace(); 172 e.printStackTrace();
143 } finally { 173 } finally {
144 if (derivedTuples != null) derivedTuples.dispose(); 174 if (derivedTuples != null) derivedTuples.dispose();
145 } 175 }
146 176
147 derivedTuples = null; 177 derivedTuples = null;
148 try { 178 try {
149 derivedTuples = engine.internal_evaluateAgainstIDBs("select distinct ?y where { ?x ?y ?z . }"); 179 derivedTuples = engine.internal_evaluateAgainstIDBs("select distinct ?y where { ?x ?y ?z . }");
150 for (long multi = derivedTuples.open(); multi != 0; multi = derivedTuples.getNext()) { 180 for (long multi = derivedTuples.open(); multi != 0; multi = derivedTuples.getNext()) {
151 String p = RDFoxTripleManager.getQuotedTerm(derivedTuples.getResource(0)); 181 String p = RDFoxTripleManager.getQuotedTerm(derivedTuples.getResource(0));
152 if (p.equals(Namespace.RDF_TYPE_ABBR) || p.equals(Namespace.RDF_TYPE_QUOTED)) ; 182 if (p.equals(Namespace.RDF_TYPE_ABBR) || p.equals(Namespace.RDF_TYPE_QUOTED)) ;
153 else if (p.equals(Namespace.EQUALITY_ABBR) || p.equals(Namespace.EQUALITY_QUOTED)); 183 else if (p.equals(Namespace.EQUALITY_ABBR) || p.equals(Namespace.EQUALITY_QUOTED));
154 else updatedPredicates.add(AtomicRole.create(prefixes.expandIRI(p))); 184 else updatedPredicates.add(AtomicRole.create(prefixes.expandIRI(p)));
155 } 185 }
156 } catch (JRDFStoreException e) { 186 } catch (JRDFStoreException e) {
157 e.printStackTrace(); 187 e.printStackTrace();
158 } finally { 188 } finally {
159 if (derivedTuples != null) derivedTuples.dispose(); 189 if (derivedTuples != null) derivedTuples.dispose();
160 } 190 }
161 191
162 } 192 }
163 193
164 public abstract Collection<Violation> isIntegrated(MultiStageQueryEngine engine, boolean incrementally); 194 public abstract Collection<Violation> isIntegrated(MultiStageQueryEngine engine, boolean incrementally);
165 195
166 protected Violation violate(MultiStageQueryEngine engine, DLClause clause, boolean incrementally) { 196 protected Violation violate(MultiStageQueryEngine engine, DLClause clause, boolean incrementally) {
167 Utility.logTrace("checking constraint: " + clause); 197 Utility.logTrace("checking constraint: " + clause);
168 198
169 String[] vars = getCommonVars(clause), subVars; 199 String[] vars = getCommonVars(clause), subVars;
170 200
171 Set<AnswerTupleID> headAnswers = new HashSet<AnswerTupleID>(); 201 Set<AnswerTupleID> headAnswers = new HashSet<AnswerTupleID>();
172 Set<Integer> rootAnswers = new HashSet<Integer>(); 202 Set<Integer> rootAnswers = new HashSet<Integer>();
173 203
174 Set<Atom> restHeadAtoms = new HashSet<Atom>(); 204 Set<Atom> restHeadAtoms = new HashSet<Atom>();
175 Atom rootAtom = null; 205 Atom rootAtom = null;
176 for (Atom atom: clause.getHeadAtoms()) 206 for (Atom atom: clause.getHeadAtoms())
177 if (rootAtom == null && atom.getDLPredicate() instanceof AtomicConcept && atom.getArgument(0).equals(X)) 207 if (rootAtom == null && atom.getDLPredicate() instanceof AtomicConcept && atom.getArgument(0).equals(X))
178 rootAtom = atom; 208 rootAtom = atom;
179 else 209 else
180 restHeadAtoms.add(atom); 210 restHeadAtoms.add(atom);
181 if (rootAtom != null) { 211 if (rootAtom != null) {
182 subVars = getVarSubset(vars, rootAtom); 212 subVars = getVarSubset(vars, rootAtom);
183 getHeadAnswers(engine, rootAtom, subVars, headAnswers); 213 getHeadAnswers(engine, rootAtom, subVars, headAnswers);
184 for (AnswerTupleID tuple: headAnswers) rootAnswers.add(tuple.getTerm(0)); 214 for (AnswerTupleID tuple : headAnswers) rootAnswers.add(tuple.getTerm(0));
185 headAnswers.clear(); 215 headAnswers.clear();
186 } 216 }
187 217
188 if (incrementally) { 218 if (incrementally) {
189 boolean affected = false; 219 boolean affected = false;
190 for (Atom bodyAtom: clause.getBodyAtoms()) 220 for (Atom bodyAtom: clause.getBodyAtoms())
191 if (updatedPredicates.contains(bodyAtom.getDLPredicate())) { 221 if (updatedPredicates.contains(bodyAtom.getDLPredicate())) {
192 affected = true; 222 affected = true;
193 break; 223 break;
194 } 224 }
195 225
196 for (Atom headAtom: clause.getHeadAtoms()) 226 for (Atom headAtom: clause.getHeadAtoms())
197 if (headAtom.getDLPredicate() instanceof AtLeastConcept && 227 if (headAtom.getDLPredicate() instanceof AtLeastConcept &&
198 ((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()).getIRI().endsWith("_neg")) 228 ((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()).getIRI().endsWith("_neg"))
199 if (updatedPredicates.contains(OverApproxExist.getNegationConcept(((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept())))) { 229 if (updatedPredicates.contains(OverApproxExist.getNegationConcept(((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept())))) {
200 affected = true; 230 affected = true;
201 break; 231 break;
202 } 232 }
203 233
204 if (!affected) return null; 234 if (!affected) return null;
205 } 235 }
206 236
207 LinkedList<AnswerTupleID> bodyAnswers = getBodyAnswers(engine, clause, vars, rootAnswers); 237 LinkedList<AnswerTupleID> bodyAnswers = getBodyAnswers(engine, clause, vars, rootAnswers);
208 rootAnswers.clear(); 238 rootAnswers.clear();
209 239
210 Utility.logTrace("Time to compute body answers: " + t.duration() + " number: " + bodyAnswers.size()); 240 Utility.logTrace("Time to compute body answers: " + t.duration() + " number: " + bodyAnswers.size());
211 241
212 if (bodyAnswers.isEmpty()) return null; 242 if (bodyAnswers.isEmpty()) return null;
213 243
214 t.reset(); 244 t.reset();
215 245
216 for (Atom headAtom: restHeadAtoms) { 246 for (Atom headAtom: restHeadAtoms) {
217// Timer subTimer = new Timer(); 247// Timer subTimer = new Timer();
218 subVars = getVarSubset(vars, headAtom); 248 subVars = getVarSubset(vars, headAtom);
219 249
220 // TODO: conditions check negative existential restrictions 250 // TODO: conditions check negative existential restrictions
221// if (false) { 251// if (false) {
222 if (headAtom.getDLPredicate() instanceof AtLeastConcept && 252 if (headAtom.getDLPredicate() instanceof AtLeastConcept &&
223 ((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()).getIRI().endsWith("_neg")) { 253 ((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()).getIRI().endsWith("_neg")) {
224 AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate(); 254 AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate();
225 String x = null, y = "Y"; 255 String x = null, y = "Y";
226 for (String var: subVars) 256 for (String var: subVars)
227 if (var != null) x = var; 257 if (var != null) x = var;
228 if (x == "Y") y = "X"; 258 if (x == "Y") y = "X";
229 Atom[] atoms = new Atom[2]; 259 Atom[] atoms = new Atom[2];
230 if (alc.getOnRole() instanceof AtomicRole) 260 if (alc.getOnRole() instanceof AtomicRole)
231 atoms[0] = Atom.create((AtomicRole) alc.getOnRole(), Variable.create(x), Variable.create(y)); 261 atoms[0] = Atom.create((AtomicRole) alc.getOnRole(), Variable.create(x), Variable.create(y));
232 else 262 else
233 atoms[0] = Atom.create(((InverseRole) alc.getOnRole()).getInverseOf(), Variable.create(y), Variable.create(x)); 263 atoms[0] = Atom.create(((InverseRole) alc.getOnRole()).getInverseOf(), Variable.create(y), Variable.create(x));
234 264
235 atoms[1] = Atom.create(OverApproxExist.getNegationConcept((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()), Variable.create(y)); 265 atoms[1] = Atom.create(OverApproxExist.getNegationConcept((AtomicConcept) ((AtLeastConcept) headAtom.getDLPredicate()).getToConcept()), Variable.create(y));
236 Set<AnswerTupleID> addAnswers = new HashSet<AnswerTupleID>(); 266 Set<AnswerTupleID> addAnswers = new HashSet<AnswerTupleID>();
237 TupleIterator tuples = null; 267 TupleIterator tuples = null;
238 try { 268 try {
239 tuples = engine.internal_evaluateNotExpanded(SparqlHelper.getSPARQLQuery(new Atom[] {atoms[0]}, x, y)); 269 tuples = engine.internal_evaluateNotExpanded(SparqlHelper.getSPARQLQuery(new Atom[]{atoms[0]}, x, y));
240 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext()) 270 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext())
241 addAnswers.add(new AnswerTupleID(tuples)); 271 addAnswers.add(new AnswerTupleID(tuples));
242 } catch (JRDFStoreException e) { 272 } catch (JRDFStoreException e) {
243 e.printStackTrace(); 273 e.printStackTrace();
244 } finally { 274 } finally {
245 if (tuples != null) tuples.dispose(); 275 if (tuples != null) tuples.dispose();
246 } 276 }
247 277
248 tuples = null; 278 tuples = null;
249 try { 279 try {
250 tuples = engine.internal_evaluateNotExpanded(SparqlHelper.getSPARQLQuery(atoms, x, y)); 280 tuples = engine.internal_evaluateNotExpanded(SparqlHelper.getSPARQLQuery(atoms, x, y));
251 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext()) 281 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext())
252 addAnswers.remove(new AnswerTupleID(tuples)); 282 addAnswers.remove(new AnswerTupleID(tuples));
253 } catch (JRDFStoreException e) { 283 } catch (JRDFStoreException e) {
254 e.printStackTrace(); 284 e.printStackTrace();
@@ -260,144 +290,95 @@ public abstract class MultiStageUpperProgram {
260 headAnswers.add(project(tuple, new String[] {x, y}, new String[] {x, null})); 290 headAnswers.add(project(tuple, new String[] {x, y}, new String[] {x, null}));
261 addAnswers.clear(); 291 addAnswers.clear();
262 } 292 }
263 293
264 getHeadAnswers(engine, headAtom, subVars, headAnswers); 294 getHeadAnswers(engine, headAtom, subVars, headAnswers);
265 for (Iterator<AnswerTupleID> iter = bodyAnswers.iterator(); iter.hasNext(); ) 295 for (Iterator<AnswerTupleID> iter = bodyAnswers.iterator(); iter.hasNext(); )
266 if (headAnswers.contains(project(iter.next(), vars, subVars))) 296 if (headAnswers.contains(project(iter.next(), vars, subVars)))
267 iter.remove(); 297 iter.remove();
268 headAnswers.clear(); 298 headAnswers.clear();
269 } 299 }
270 300
271 Utility.logTrace("Time to rule out all head answers: " + t.duration() + " rest number: " + bodyAnswers.size()); 301 Utility.logTrace("Time to rule out all head answers: " + t.duration() + " rest number: " + bodyAnswers.size());
272 302
273 if (bodyAnswers.isEmpty()) return null; 303 if (bodyAnswers.isEmpty()) return null;
274 304
275 return new Violation(clause, bodyAnswers, vars); 305 return new Violation(clause, bodyAnswers, vars);
276 } 306 }
277 307
278 private void getHeadAnswers(MultiStageQueryEngine engine, Atom headAtom, String[] commonVars, Set<AnswerTupleID> headAnswers) { 308 private void getHeadAnswers(MultiStageQueryEngine engine, Atom headAtom, String[] commonVars, Set<AnswerTupleID> headAnswers) {
279 String headQuery = SparqlHelper.getSPARQLQuery(new Atom[] { headAtom }, commonVars); 309 String headQuery = SparqlHelper.getSPARQLQuery(new Atom[]{headAtom}, commonVars);
280 TupleIterator tuples = null; 310 TupleIterator tuples = null;
281 try { 311 try {
282 tuples = engine.internal_evaluateNotExpanded(headQuery); 312 tuples = engine.internal_evaluateNotExpanded(headQuery);
283 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext()) 313 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext())
284 headAnswers.add(new AnswerTupleID(tuples)); 314 headAnswers.add(new AnswerTupleID(tuples));
285 } catch (JRDFStoreException e) { 315 } catch (JRDFStoreException e) {
286 e.printStackTrace(); 316 e.printStackTrace();
287 } finally { 317 } finally {
288 if (tuples != null) tuples.dispose(); 318 if (tuples != null) tuples.dispose();
289 } 319 }
290 } 320 }
291 321
292 private Timer t = new Timer();
293
294 protected LinkedList<AnswerTupleID> getBodyAnswers(MultiStageQueryEngine engine, DLClause clause, String[] vars, Set<Integer> rootAnswers) { //, boolean incrementally) { 322 protected LinkedList<AnswerTupleID> getBodyAnswers(MultiStageQueryEngine engine, DLClause clause, String[] vars, Set<Integer> rootAnswers) { //, boolean incrementally) {
295 Collection<int[]> validIndexes = new LinkedList<int[]>(); 323 Collection<int[]> validIndexes = new LinkedList<int[]>();
296 324
297 int rootVarIndex = -1; 325 int rootVarIndex = -1;
298 for (int i = 0; i < vars.length; ++i) 326 for (int i = 0; i < vars.length; ++i)
299 if (vars[i].equals("X")) { rootVarIndex = i; break; } 327 if (vars[i].equals("X")) {
300 328 rootVarIndex = i;
301 String[] subVars; 329 break;
330 }
331
332 String[] subVars;
302 for (Atom headAtom: clause.getHeadAtoms()) { 333 for (Atom headAtom: clause.getHeadAtoms()) {
303 if ((headAtom.getDLPredicate() instanceof Equality || headAtom.getDLPredicate() instanceof AnnotatedEquality) 334 if ((headAtom.getDLPredicate() instanceof Equality || headAtom.getDLPredicate() instanceof AnnotatedEquality)
304 && headAtom.getArgument(0) instanceof Variable && headAtom.getArgument(1) instanceof Variable) { 335 && headAtom.getArgument(0) instanceof Variable && headAtom.getArgument(1) instanceof Variable) {
305 int[] validIndex = new int[2]; 336 int[] validIndex = new int[2];
306 subVars = getVarSubset(vars, headAtom); 337 subVars = getVarSubset(vars, headAtom);
307 for (int i = 0, j = 0; i < subVars.length; ++i) 338 for (int i = 0, j = 0; i < subVars.length; ++i)
308 if (subVars[i] != null) 339 if (subVars[i] != null)
309 validIndex[j++] = i; 340 validIndex[j++] = i;
310 validIndexes.add(validIndex); 341 validIndexes.add(validIndex);
311 } 342 }
312 } 343 }
313 344
314 t.reset(); 345 t.reset();
315 346
316 LinkedList<AnswerTupleID> bodyAnswers = new LinkedList<AnswerTupleID>(); 347 LinkedList<AnswerTupleID> bodyAnswers = new LinkedList<AnswerTupleID>();
317 String bodyQuery = SparqlHelper.getSPARQLQuery(clause.getBodyAtoms(), vars); 348 String bodyQuery = SparqlHelper.getSPARQLQuery(clause.getBodyAtoms(), vars);
318 TupleIterator bodyTuples = null; 349 TupleIterator bodyTuples = null;
319 350
320 boolean filtered; 351 boolean filtered;
321 try { 352 try {
322 bodyTuples = engine.internal_evaluateNotExpanded(bodyQuery); 353 bodyTuples = engine.internal_evaluateNotExpanded(bodyQuery);
323 for (long multi = bodyTuples.open(); multi != 0; multi = bodyTuples.getNext()) { 354 for (long multi = bodyTuples.open(); multi != 0; multi = bodyTuples.getNext()) {
324 filtered = false; 355 filtered = false;
325 if (rootVarIndex >= 0 && rootAnswers.contains(bodyTuples.getResourceID(rootVarIndex))) continue; 356 if (rootVarIndex >= 0 && rootAnswers.contains(bodyTuples.getResourceID(rootVarIndex))) continue;
326 for (int[] validIndex: validIndexes) 357 for (int[] validIndex: validIndexes)
327 if (bodyTuples.getResourceID(validIndex[0]) == bodyTuples.getResourceID(validIndex[1])) { 358 if (bodyTuples.getResourceID(validIndex[0]) == bodyTuples.getResourceID(validIndex[1])) {
328 filtered = true; 359 filtered = true;
329 break; 360 break;
330 } 361 }
331 if (!filtered) 362 if (!filtered)
332 bodyAnswers.add(new AnswerTupleID(bodyTuples)); 363 bodyAnswers.add(new AnswerTupleID(bodyTuples));
333 } 364 }
334 } catch (JRDFStoreException e) { 365 } catch (JRDFStoreException e) {
335 e.printStackTrace(); 366 e.printStackTrace();
336 } finally { 367 } finally {
337 if (bodyTuples != null) bodyTuples.dispose(); 368 if (bodyTuples != null) bodyTuples.dispose();
338 } 369 }
339
340 validIndexes.clear();
341 Utility.logTrace("Time to get all body answers: " + t.duration());
342 return bodyAnswers;
343 }
344
345 public static AnswerTupleID project(AnswerTupleID tuple, String[] vars, String[] subVars) {
346 int subArity = 0;
347 for (int i = 0; i < subVars.length; ++i)
348 if (subVars[i] != null) ++subArity;
349
350 if (tuple.getArity() == subArity)
351 return tuple;
352
353 AnswerTupleID newTuple = new AnswerTupleID(subArity);
354 for (int i = 0, j = 0; i < vars.length; ++i)
355 if (subVars[i] != null && !subVars[i].isEmpty()) {
356 newTuple.setTerm(j++, tuple.getTerm(i));
357 }
358
359 return newTuple;
360 }
361
362 public static String[] getVarSubset(String[] vars, Atom... atoms) {
363 String[] newVars = new String[vars.length];
364 Set<Variable> allVars = new HashSet<Variable>();
365 int arity;
366 for (Atom atom: atoms) {
367 arity = atom.getArity();
368 if (atom.getDLPredicate() instanceof AnnotatedEquality) arity = 2;
369 for (int j = 0; j < arity; ++j)
370 if (atom.getArgument(j) instanceof Variable) {
371 allVars.add(atom.getArgumentVariable(j));
372 }
373 }
374
375 for (int i = 0; i < vars.length; ++i) {
376 newVars[i] = allVars.contains(Variable.create(vars[i])) ? vars[i] : null;
377 }
378
379 return newVars;
380 }
381
382 public static String[] getCommonVars(DLClause clause) {
383 Set<Variable> headVars = getVariables(clause.getHeadAtoms());
384 Set<Variable> bodyVars = getVariables(clause.getBodyAtoms());
385 370
386 Collection<String> common = new LinkedList<String>(); 371 validIndexes.clear();
387 for (Variable v: headVars) 372 Utility.logTrace("Time to get all body answers: " + t.duration());
388 if (bodyVars.contains(v)) common.add(v.getName()); 373 return bodyAnswers;
389
390 return common.toArray(new String[0]);
391 } 374 }
392 375
393 public static Set<Variable> getVariables(Atom[] atoms) { 376 public Collection<DLClause> convertExist(DLClause clause, DLClause originalDLClause) {
394 Set<Variable> v = new HashSet<Variable>(); 377 return m_bottom.process(m_approxExist.convert(clause, originalDLClause, null));
395 for (Atom atom: atoms) atom.getVariables(v);
396 return v;
397 } 378 }
398 379
399 public Collection<DLClause> convertExist(DLClause clause, DLClause originalDLClause) { 380 public Collection<DLClause> convertExist(DLClause clause, DLClause originalDLClause, Collection<Tuple<Individual>> violationTuples) {
400 return m_bottom.process(m_approxExist.convert(clause, originalDLClause)); 381 return m_bottom.process(m_approxExist.convert(clause, originalDLClause, violationTuples));
401 } 382 }
402 383
403 public void save(String filename) { 384 public void save(String filename) {
@@ -414,11 +395,9 @@ public abstract class MultiStageUpperProgram {
414 e.printStackTrace(); 395 e.printStackTrace();
415 } catch (IOException e) { 396 } catch (IOException e) {
416 e.printStackTrace(); 397 e.printStackTrace();
417 } 398 }
418 } 399 }
419 400
420 Set<DLPredicate> updatedPredicates = new HashSet<DLPredicate>();
421
422// public void addUpdatedPredicates(Set<DLPredicate> predicates) { 401// public void addUpdatedPredicates(Set<DLPredicate> predicates) {
423// for (Iterator<DLPredicate> iter = predicates.iterator(); iter.hasNext(); ) { 402// for (Iterator<DLPredicate> iter = predicates.iterator(); iter.hasNext(); ) {
424// updatedPredicate.add(iter.next()); 403// updatedPredicate.add(iter.next());
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/Normalisation.java b/src/uk/ac/ox/cs/pagoda/multistage/Normalisation.java
index 4667747..c5482e7 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/Normalisation.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/Normalisation.java
@@ -1,82 +1,93 @@
1package uk.ac.ox.cs.pagoda.multistage; 1package uk.ac.ox.cs.pagoda.multistage;
2 2
3import java.util.Collection; 3import org.semanticweb.HermiT.model.*;
4import java.util.HashMap; 4import org.semanticweb.owlapi.model.*;
5import java.util.HashSet;
6import java.util.Iterator;
7import java.util.LinkedList;
8import java.util.Map;
9import java.util.Set;
10
11import org.semanticweb.HermiT.model.AtLeast;
12import org.semanticweb.HermiT.model.AtLeastConcept;
13import org.semanticweb.HermiT.model.AtLeastDataRange;
14import org.semanticweb.HermiT.model.Atom;
15import org.semanticweb.HermiT.model.AtomicConcept;
16import org.semanticweb.HermiT.model.AtomicNegationConcept;
17import org.semanticweb.HermiT.model.AtomicRole;
18import org.semanticweb.HermiT.model.Constant;
19import org.semanticweb.HermiT.model.ConstantEnumeration;
20import org.semanticweb.HermiT.model.DLClause;
21import org.semanticweb.HermiT.model.Individual;
22import org.semanticweb.HermiT.model.Inequality;
23import org.semanticweb.HermiT.model.InverseRole;
24import org.semanticweb.HermiT.model.LiteralConcept;
25import org.semanticweb.HermiT.model.Role;
26import org.semanticweb.HermiT.model.Variable;
27import org.semanticweb.owlapi.model.OWLClass;
28import org.semanticweb.owlapi.model.OWLClassExpression;
29import org.semanticweb.owlapi.model.OWLDataHasValue;
30import org.semanticweb.owlapi.model.OWLDataProperty;
31import org.semanticweb.owlapi.model.OWLDataPropertyExpression;
32import org.semanticweb.owlapi.model.OWLLiteral;
33import org.semanticweb.owlapi.model.OWLObjectHasSelf;
34import org.semanticweb.owlapi.model.OWLObjectInverseOf;
35import org.semanticweb.owlapi.model.OWLObjectMinCardinality;
36import org.semanticweb.owlapi.model.OWLObjectProperty;
37import org.semanticweb.owlapi.model.OWLObjectPropertyExpression;
38import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom;
39import org.semanticweb.owlapi.model.OWLOntology;
40
41import uk.ac.ox.cs.pagoda.MyPrefixes; 5import uk.ac.ox.cs.pagoda.MyPrefixes;
42import uk.ac.ox.cs.pagoda.approx.Clause; 6import uk.ac.ox.cs.pagoda.approx.Clause;
43import uk.ac.ox.cs.pagoda.approx.Clausifier; 7import uk.ac.ox.cs.pagoda.approx.Clausifier;
44import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; 8import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
45import uk.ac.ox.cs.pagoda.rules.OverApproxExist; 9import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
10import uk.ac.ox.cs.pagoda.rules.approximators.SkolemTermsManager;
46import uk.ac.ox.cs.pagoda.util.Namespace; 11import uk.ac.ox.cs.pagoda.util.Namespace;
47import uk.ac.ox.cs.pagoda.util.Utility; 12import uk.ac.ox.cs.pagoda.util.Utility;
48 13
14import java.util.*;
15
49public class Normalisation { 16public class Normalisation {
50 17
51// MultiStageUpperProgram m_program; 18 public static final String auxiliaryConceptPrefix = Namespace.PAGODA_AUX + "concept_";
19 private static final Variable X = Variable.create("X"), Y = Variable.create("Y");
20 // MultiStageUpperProgram m_program;
52 OWLOntology m_ontology; 21 OWLOntology m_ontology;
53 BottomStrategy m_botStrategy; 22 BottomStrategy m_botStrategy;
54 Collection<DLClause> m_rules; 23 Collection<DLClause> m_rules;
55 Set<DLClause> m_normClauses = new HashSet<DLClause>(); 24 Set<DLClause> m_normClauses = new HashSet<DLClause>();
56 Map<DLClause, DLClause> exist2original = new HashMap<DLClause, DLClause>(); 25 Map<DLClause, DLClause> exist2original = new HashMap<DLClause, DLClause>();
57 26 Map<String, AtLeastConcept> rightAuxiliaryConcept = new HashMap<String, AtLeastConcept>();
27 private Map<AtLeastConcept, AtomicConcept> leftAuxiliaryConcept = new HashMap<AtLeastConcept, AtomicConcept>();
28 private Map<AtomicConcept, AtLeastConcept> leftAuxiliaryConcept_inv = new HashMap<AtomicConcept, AtLeastConcept>();
29
30 public Normalisation(Collection<DLClause> rules, OWLOntology ontology, BottomStrategy botStrategy) {
31// m_program = program;
32 m_ontology = ontology;
33 m_rules = rules;
34 m_botStrategy = botStrategy;
35 }
36
37 public static AtLeastConcept toAtLeastConcept(AtLeast p) {
38 if (p instanceof AtLeastConcept) return (AtLeastConcept) p;
39 AtLeastDataRange aldr = (AtLeastDataRange) p;
40 return AtLeastConcept.create(aldr.getNumber(), aldr.getOnRole(), AtomicConcept.create(MyPrefixes.PAGOdAPrefixes.expandIRI(aldr.getToDataRange().toString())));
41 }
42
43 private static String getName(String iri) {
44 int index = iri.lastIndexOf("#");
45 if (index != -1) return iri.substring(index + 1);
46 index = iri.lastIndexOf("/");
47 if (index != -1) return iri.substring(index + 1);
48 return iri;
49 }
50
51 public static String getAuxiliaryConcept4Disjunct(AtLeastConcept alc, Individual... individuals) {
52 Role r = alc.getOnRole();
53 LiteralConcept c = alc.getToConcept();
54 StringBuilder builder = new StringBuilder(auxiliaryConceptPrefix);
55 if (r instanceof AtomicRole)
56 builder.append(getName(((AtomicRole) r).getIRI()));
57 else
58 builder.append(getName(((InverseRole) r).getInverseOf().getIRI())).append("_inv");
59
60 if (alc.getNumber() > 1)
61 builder.append("_").append(alc.getNumber());
62
63 if (c instanceof AtomicConcept) {
64 if (!c.equals(AtomicConcept.THING))
65 builder.append("_").append(getName(((AtomicConcept) c).getIRI()));
66 } else
67 builder.append("_").append(getName((OverApproxExist.getNegationConcept(((AtomicNegationConcept) c).getNegatedAtomicConcept()).getIRI())));
68
69 if (individuals.length > 1)
70 builder.append("_").append(getName(individuals[0].getIRI()));
71
72 builder.append("_exist");
73
74 return builder.toString();
75 }
76
58 public Set<DLClause> getNormlisedClauses() { 77 public Set<DLClause> getNormlisedClauses() {
59 return m_normClauses; 78 return m_normClauses;
60 } 79 }
61 80
62 public Normalisation(Collection<DLClause> rules, OWLOntology ontology, BottomStrategy botStrategy) {
63// m_program = program;
64 m_ontology = ontology;
65 m_rules = rules;
66 m_botStrategy = botStrategy;
67 }
68
69 public void process() { 81 public void process() {
70 for (DLClause clause: m_rules) 82 for (DLClause clause : m_rules)
71 if (m_botStrategy.isBottomRule(clause)) 83 if (m_botStrategy.isBottomRule(clause))
72 processBottomRule(clause); 84 processBottomRule(clause);
73 else if (clause.getHeadLength() == 1) { 85 else if (clause.getHeadLength() == 1) {
74 if (clause.getHeadAtom(0).getDLPredicate() instanceof AtLeast) 86 if (clause.getHeadAtom(0).getDLPredicate() instanceof AtLeast)
75 processExistentialRule(clause); 87 processExistentialRule(clause);
76 else 88 else
77 m_normClauses.add(clause); 89 m_normClauses.add(clause);
78 } 90 } else
79 else
80 processDisjunctiveRule(clause); 91 processDisjunctiveRule(clause);
81 } 92 }
82 93
@@ -85,75 +96,71 @@ public class Normalisation {
85 m_normClauses.add(clause); 96 m_normClauses.add(clause);
86 return ; 97 return ;
87 } 98 }
88 99
89 Atom headAtom = clause.getHeadAtom(0); 100 Atom headAtom = clause.getHeadAtom(0);
90 if (headAtom.getDLPredicate() instanceof AtLeastDataRange) { 101 if (headAtom.getDLPredicate() instanceof AtLeastDataRange) {
91 m_normClauses.add(clause); 102 m_normClauses.add(clause);
92 return ; 103 return ;
93 } 104 }
94 AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate(); 105 AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate();
95 AtomicConcept ac = getRightAuxiliaryConcept(alc, OverApproxExist.getNewIndividual(clause, 0)); 106 // TODO test
96 DLClause newClause; 107// AtomicConcept ac = getRightAuxiliaryConcept(alc, OverApproxExist.getNewIndividual(clause, 0));
108 AtomicConcept ac = getRightAuxiliaryConcept(alc, SkolemTermsManager.getInstance().getFreshIndividual(clause, 0));
109 DLClause newClause;
97 m_normClauses.add(DLClause.create(new Atom[] {Atom.create(ac, headAtom.getArgument(0)) }, clause.getBodyAtoms())); 110 m_normClauses.add(DLClause.create(new Atom[] {Atom.create(ac, headAtom.getArgument(0)) }, clause.getBodyAtoms()));
98 m_normClauses.add(newClause = DLClause.create(new Atom[] {Atom.create(alc, X)}, new Atom[] {Atom.create(ac, X)})); 111 m_normClauses.add(newClause = DLClause.create(new Atom[] {Atom.create(alc, X)}, new Atom[] {Atom.create(ac, X)}));
99 exist2original.put(newClause, clause); 112 exist2original.put(newClause, clause);
100 }
101
102 public static AtLeastConcept toAtLeastConcept(AtLeast p) {
103 if (p instanceof AtLeastConcept) return (AtLeastConcept) p;
104 AtLeastDataRange aldr = (AtLeastDataRange) p;
105 return AtLeastConcept.create(aldr.getNumber(), aldr.getOnRole(), AtomicConcept.create(MyPrefixes.PAGOdAPrefixes.expandIRI(aldr.getToDataRange().toString())));
106 } 113 }
107 114
108 private void processDisjunctiveRule(DLClause clause) { 115 private void processDisjunctiveRule(DLClause clause) {
109 boolean toNormalise = false; 116 boolean toNormalise = false;
110 for (Atom atom: clause.getHeadAtoms()) 117 for (Atom atom: clause.getHeadAtoms())
111 if (!(atom.getDLPredicate() instanceof AtomicConcept)) { 118 if (!(atom.getDLPredicate() instanceof AtomicConcept)) {
112 toNormalise = true; 119 toNormalise = true;
113 break; 120 break;
114 } 121 }
115 122
116 if (!toNormalise) { 123 if (!toNormalise) {
117 m_normClauses.add(clause); 124 m_normClauses.add(clause);
118 return ; 125 return;
119 } 126 }
120 127
121 Atom[] newHeadAtoms = new Atom[clause.getHeadLength()]; 128 Atom[] newHeadAtoms = new Atom[clause.getHeadLength()];
122 Set<Atom> additionalAtoms = new HashSet<Atom>(); 129 Set<Atom> additionalAtoms = new HashSet<Atom>();
123 int index = 0; 130 int index = 0;
124 DLClause newClause; 131 DLClause newClause;
125 for (Atom headAtom: clause.getHeadAtoms()) { 132 for (Atom headAtom: clause.getHeadAtoms()) {
126 if (headAtom.getDLPredicate() instanceof AtLeast) { 133 if (headAtom.getDLPredicate() instanceof AtLeast) {
127 AtLeast al = (AtLeast) headAtom.getDLPredicate(); 134 AtLeast al = (AtLeast) headAtom.getDLPredicate();
128 if (al instanceof AtLeastDataRange && ((AtLeastDataRange) al).getToDataRange() instanceof ConstantEnumeration) { 135 if (al instanceof AtLeastDataRange && ((AtLeastDataRange) al).getToDataRange() instanceof ConstantEnumeration) {
129 ConstantEnumeration ldr = (ConstantEnumeration) ((AtLeastDataRange) al).getToDataRange(); 136 ConstantEnumeration ldr = (ConstantEnumeration) ((AtLeastDataRange) al).getToDataRange();
130 newHeadAtoms[index] = null; 137 newHeadAtoms[index] = null;
131 Atom newHeadAtom; 138 Atom newHeadAtom;
132 for (int i = 0; i < ldr.getNumberOfConstants(); ++i) { 139 for (int i = 0; i < ldr.getNumberOfConstants(); ++i) {
133 newHeadAtom = Atom.create(AtomicRole.create(((AtomicRole) ((AtLeastDataRange) al).getOnRole()).getIRI()), headAtom.getArgument(0), ldr.getConstant(i)); 140 newHeadAtom = Atom.create(AtomicRole.create(((AtomicRole) al.getOnRole()).getIRI()), headAtom.getArgument(0), ldr.getConstant(i));
134 if (newHeadAtoms[index] == null) newHeadAtoms[index] = newHeadAtom; 141 if (newHeadAtoms[index] == null) newHeadAtoms[index] = newHeadAtom;
135 else additionalAtoms.add(newHeadAtom); 142 else additionalAtoms.add(newHeadAtom);
136 } 143 }
137 } else { 144 } else {
138 AtLeastConcept alc = toAtLeastConcept((AtLeast) headAtom.getDLPredicate()); 145 AtLeastConcept alc = toAtLeastConcept((AtLeast) headAtom.getDLPredicate());
139 AtomicConcept ac = getRightAuxiliaryConcept(alc, OverApproxExist.getNewIndividual(clause, 0)); 146// AtomicConcept ac = getRightAuxiliaryConcept(alc, OverApproxExist.getNewIndividual(clause, 0));
147 AtomicConcept ac = getRightAuxiliaryConcept(alc, SkolemTermsManager.getInstance().getFreshIndividual(clause, 0));
140 newHeadAtoms[index] = Atom.create(ac, headAtom.getArgument(0)); 148 newHeadAtoms[index] = Atom.create(ac, headAtom.getArgument(0));
141 m_normClauses.add(newClause = DLClause.create(new Atom[] {Atom.create(alc, headAtom.getArgument(0))}, new Atom[] {newHeadAtoms[index]})); 149 m_normClauses.add(newClause = DLClause.create(new Atom[] {Atom.create(alc, headAtom.getArgument(0))}, new Atom[] {newHeadAtoms[index]}));
142 exist2original.put(newClause, clause); 150 exist2original.put(newClause, clause);
143 } 151 }
144 } 152 } else
145 else
146 newHeadAtoms[index] = headAtom; 153 newHeadAtoms[index] = headAtom;
147 ++index; 154 ++index;
148 } 155 }
149 156
150 if (!additionalAtoms.isEmpty()) { 157 if (!additionalAtoms.isEmpty()) {
151 Atom[] tempHeadAtoms = newHeadAtoms; 158 Atom[] tempHeadAtoms = newHeadAtoms;
152 newHeadAtoms = new Atom[newHeadAtoms.length + additionalAtoms.size()]; 159 newHeadAtoms = new Atom[newHeadAtoms.length + additionalAtoms.size()];
153 for (int i = 0; i < tempHeadAtoms.length; ++i) 160 for (int i = 0; i < tempHeadAtoms.length; ++i)
154 newHeadAtoms[i] = tempHeadAtoms[i]; 161 newHeadAtoms[i] = tempHeadAtoms[i];
155 int tempI = tempHeadAtoms.length; 162 int tempI = tempHeadAtoms.length;
156 for (Iterator<Atom> iter = additionalAtoms.iterator(); iter.hasNext(); ) 163 for (Iterator<Atom> iter = additionalAtoms.iterator(); iter.hasNext(); )
157 newHeadAtoms[tempI++] = iter.next(); 164 newHeadAtoms[tempI++] = iter.next();
158 additionalAtoms.clear(); 165 additionalAtoms.clear();
159 } 166 }
@@ -169,202 +176,157 @@ public class Normalisation {
169 return ; 176 return ;
170 } 177 }
171 } 178 }
172 179
173 boolean toNormalise = false; 180 boolean toNormalise = false;
174 for (Atom atom: clause.getBodyAtoms()) 181 for (Atom atom: clause.getBodyAtoms())
175 if (!(atom.getDLPredicate() instanceof AtomicConcept)) 182 if (!(atom.getDLPredicate() instanceof AtomicConcept))
176 toNormalise = true; 183 toNormalise = true;
177 184
178 if (!toNormalise) { 185 if (!toNormalise) {
179 m_normClauses.add(clause); 186 m_normClauses.add(clause);
180 return ; 187 return;
181 } 188 }
182 189
183 Clause myClause = null; 190 Clause myClause = null;
184 try { 191 try {
185 myClause = new Clause(Clausifier.getInstance(m_ontology), clause); 192 myClause = new Clause(Clausifier.getInstance(m_ontology), clause);
186 } catch (Exception e) { 193 } catch (Exception e) {
187 Utility.logError("The clause: " + clause + " cannot be rolled up into GCI."); 194 Utility.logError("The clause: " + clause + " cannot be rolled up into GCI.");
188 m_normClauses.add(clause); 195 m_normClauses.add(clause);
189 return ; 196 return;
190 } 197 }
191 198
192 Atom[] newBodyAtoms = new Atom [myClause.getSubClasses().size()]; 199 Atom[] newBodyAtoms = new Atom [myClause.getSubClasses().size()];
193 int index = 0; 200 int index = 0;
194 for (OWLClassExpression clsExp: myClause.getSubClasses()) { 201 for (OWLClassExpression clsExp: myClause.getSubClasses()) {
195 if (clsExp instanceof OWLClass) 202 if (clsExp instanceof OWLClass)
196 newBodyAtoms[index] = Atom.create(AtomicConcept.create(((OWLClass) clsExp).getIRI().toString()), X); 203 newBodyAtoms[index] = Atom.create(AtomicConcept.create(((OWLClass) clsExp).getIRI().toString()), X);
197 else if (clsExp instanceof OWLObjectSomeValuesFrom || clsExp instanceof OWLObjectMinCardinality) { 204 else if (clsExp instanceof OWLObjectSomeValuesFrom || clsExp instanceof OWLObjectMinCardinality) {
198 int number; 205 int number;
199 OWLObjectPropertyExpression prop; 206 OWLObjectPropertyExpression prop;
200 OWLClassExpression filler; 207 OWLClassExpression filler;
201 if (clsExp instanceof OWLObjectSomeValuesFrom) { 208 if (clsExp instanceof OWLObjectSomeValuesFrom) {
202 OWLObjectSomeValuesFrom owl = (OWLObjectSomeValuesFrom) clsExp; 209 OWLObjectSomeValuesFrom owl = (OWLObjectSomeValuesFrom) clsExp;
203 number = 1; 210 number = 1;
204 prop = owl.getProperty(); 211 prop = owl.getProperty();
205 filler = owl.getFiller(); 212 filler = owl.getFiller();
206 } 213 }
207 else { 214 else {
208 OWLObjectMinCardinality owl = (OWLObjectMinCardinality) clsExp; 215 OWLObjectMinCardinality owl = (OWLObjectMinCardinality) clsExp;
209 number = owl.getCardinality(); 216 number = owl.getCardinality();
210 prop = owl.getProperty(); 217 prop = owl.getProperty();
211 filler = owl.getFiller(); 218 filler = owl.getFiller();
212 } 219 }
213 220
214 Role r = null; 221 Role r = null;
215 if (prop instanceof OWLObjectProperty) 222 if (prop instanceof OWLObjectProperty)
216 r = AtomicRole.create(((OWLObjectProperty) prop).getIRI().toString()); 223 r = AtomicRole.create(((OWLObjectProperty) prop).getIRI().toString());
217 else 224 else
218 r = InverseRole.create(AtomicRole.create(((OWLObjectProperty) (((OWLObjectInverseOf) prop).getInverse())).getIRI().toString())); 225 r = InverseRole.create(AtomicRole.create(((OWLObjectProperty) (((OWLObjectInverseOf) prop).getInverse())).getIRI().toString()));
219 226
220 LiteralConcept c = AtomicConcept.create(((OWLClass) filler).getIRI().toString()); 227 LiteralConcept c = AtomicConcept.create(((OWLClass) filler).getIRI().toString());
221 AtomicConcept ac = getLeftAuxiliaryConcept(AtLeastConcept.create(number, r, c), false); 228 AtomicConcept ac = getLeftAuxiliaryConcept(AtLeastConcept.create(number, r, c), false);
222 229
223 m_normClauses.add(exists_r_C_implies_A(number, r, c, ac)); 230 m_normClauses.add(exists_r_C_implies_A(number, r, c, ac));
224 newBodyAtoms[index] = Atom.create(ac, X); 231 newBodyAtoms[index] = Atom.create(ac, X);
225 } 232 }
226// else if (clsExp instanceof OWLDataSomeValuesFrom || clsExp instanceof OWLDataMinCardinality) { 233// else if (clsExp instanceof OWLDataSomeValuesFrom || clsExp instanceof OWLDataMinCardinality) {
227// int number; 234// int number;
228// OWLDataPropertyExpression prop; 235// OWLDataPropertyExpression prop;
229// OWLDataRange filler; 236// OWLDataRange filler;
230// if (clsExp instanceof OWLDataSomeValuesFrom) { 237// if (clsExp instanceof OWLDataSomeValuesFrom) {
231// OWLDataSomeValuesFrom owl = (OWLDataSomeValuesFrom) clsExp; 238// OWLDataSomeValuesFrom owl = (OWLDataSomeValuesFrom) clsExp;
232// number = 1; 239// number = 1;
233// prop = owl.getProperty(); 240// prop = owl.getProperty();
234// filler = owl.getFiller(); 241// filler = owl.getFiller();
235// } 242// }
236// else { 243// else {
237// OWLDataMinCardinality owl = (OWLDataMinCardinality) clsExp; 244// OWLDataMinCardinality owl = (OWLDataMinCardinality) clsExp;
238// number = owl.getCardinality(); 245// number = owl.getCardinality();
239// prop = owl.getProperty(); 246// prop = owl.getProperty();
240// filler = owl.getFiller(); 247// filler = owl.getFiller();
241// } 248// }
242// 249//
243// Role r = AtomicRole.create(((OWLDataProperty) prop).getIRI().toString()); 250// Role r = AtomicRole.create(((OWLDataProperty) prop).getIRI().toString());
244// 251//
245// LiteralConcept c = AtomicConcept.create(((OWLClass) filler).getIRI().toString()); 252// LiteralConcept c = AtomicConcept.create(((OWLClass) filler).getIRI().toString());
246// AtomicConcept ac = getLeftAuxiliaryConcept(AtLeastConcept.create(number, r, c), false); 253// AtomicConcept ac = getLeftAuxiliaryConcept(AtLeastConcept.create(number, r, c), false);
247// 254//
248// m_normClauses.add(exists_r_C_implies_A(number, r, c, ac)); 255// m_normClauses.add(exists_r_C_implies_A(number, r, c, ac));
249// newBodyAtoms[index] = Atom.create(ac, X); 256// newBodyAtoms[index] = Atom.create(ac, X);
250// } 257// }
251 else if (clsExp instanceof OWLObjectHasSelf) { 258 else if (clsExp instanceof OWLObjectHasSelf) {
252 OWLObjectPropertyExpression prop = ((OWLObjectHasSelf) clsExp).getProperty(); 259 OWLObjectPropertyExpression prop = ((OWLObjectHasSelf) clsExp).getProperty();
253 AtomicRole r; 260 AtomicRole r;
254 if (prop instanceof OWLObjectProperty) 261 if (prop instanceof OWLObjectProperty)
255 r = AtomicRole.create(((OWLObjectProperty) prop).getIRI().toString()); 262 r = AtomicRole.create(((OWLObjectProperty) prop).getIRI().toString());
256 else 263 else
257 r = AtomicRole.create(((OWLObjectProperty) (((OWLObjectInverseOf) prop).getInverse())).getIRI().toString()); 264 r = AtomicRole.create(((OWLObjectProperty) (((OWLObjectInverseOf) prop).getInverse())).getIRI().toString());
258 newBodyAtoms[index] = Atom.create(r, X, X); 265 newBodyAtoms[index] = Atom.create(r, X, X);
259 } 266 }
260 else if (clsExp instanceof OWLDataHasValue) { 267 else if (clsExp instanceof OWLDataHasValue) {
261 OWLDataPropertyExpression prop = ((OWLDataHasValue) clsExp).getProperty(); 268 OWLDataPropertyExpression prop = ((OWLDataHasValue) clsExp).getProperty();
262 AtomicRole r = AtomicRole.create(((OWLDataProperty) prop).getIRI().toString()); 269 AtomicRole r = AtomicRole.create(((OWLDataProperty) prop).getIRI().toString());
263 OWLLiteral l = ((OWLDataHasValue) clsExp).getValue(); 270 OWLLiteral l = ((OWLDataHasValue) clsExp).getValue();
264 if (l.getDatatype().toStringID().equals(Namespace.RDF_PLAIN_LITERAL)) 271 if (l.getDatatype().toStringID().equals(Namespace.RDF_PLAIN_LITERAL))
265 newBodyAtoms[index] = Atom.create(r, X, Constant.create(l.getLiteral() + "@" + l.getLang(), Namespace.RDF_PLAIN_LITERAL)); 272 newBodyAtoms[index] = Atom.create(r, X, Constant.create(l.getLiteral() + "@" + l.getLang(), Namespace.RDF_PLAIN_LITERAL));
266 else 273 else
267 newBodyAtoms[index] = Atom.create(r, X, Constant.create(l.getLiteral(), l.getDatatype().toStringID())); 274 newBodyAtoms[index] = Atom.create(r, X, Constant.create(l.getLiteral(), l.getDatatype().toStringID()));
268 } 275 } else {
269 else {
270 newBodyAtoms[index] = null; 276 newBodyAtoms[index] = null;
271 Utility.logError("counld not translate OWLClassExpression: " + clsExp + " in " + clause); 277 Utility.logError("counld not translate OWLClassExpression: " + clsExp + " in " + clause);
272 } 278 }
273 ++index; 279 ++index;
274 } 280 }
275 281
276 m_normClauses.add(DLClause.create(clause.getHeadAtoms(), newBodyAtoms)); 282 m_normClauses.add(DLClause.create(clause.getHeadAtoms(), newBodyAtoms));
277 } 283 }
278 284
279 private static final Variable X = Variable.create("X"), Y = Variable.create("Y");
280
281 private DLClause exists_r_C_implies_A(int n, Role r, LiteralConcept c, AtomicConcept a) { 285 private DLClause exists_r_C_implies_A(int n, Role r, LiteralConcept c, AtomicConcept a) {
282 Variable[] Ys = new Variable[n]; 286 Variable[] Ys = new Variable[n];
283 if (n == 1) Ys[0] = Y; 287 if (n == 1) Ys[0] = Y;
284 else 288 else
285 for (int i = 0; i < n; ++i) 289 for (int i = 0; i < n; ++i)
286 Ys[i] = Variable.create("Y" + (i + 1)); 290 Ys[i] = Variable.create("Y" + (i + 1));
287 Collection<Atom> bodyAtoms = new LinkedList<Atom>(); 291 Collection<Atom> bodyAtoms = new LinkedList<Atom>();
288 292
289 for (int i = 0; i < n; ++i) { 293 for (int i = 0; i < n; ++i) {
290 Atom rxy = r instanceof AtomicRole ? 294 Atom rxy = r instanceof AtomicRole ?
291 Atom.create(((AtomicRole) r), X, Ys[i]) : 295 Atom.create(((AtomicRole) r), X, Ys[i]) :
292 Atom.create(((InverseRole) r).getInverseOf(), Ys[i], X); 296 Atom.create(((InverseRole) r).getInverseOf(), Ys[i], X);
293 bodyAtoms.add(rxy); 297 bodyAtoms.add(rxy);
294 if (!c.equals(AtomicConcept.THING)) 298 if (!c.equals(AtomicConcept.THING))
295 bodyAtoms.add(Atom.create((AtomicConcept) c, Ys[i])); 299 bodyAtoms.add(Atom.create((AtomicConcept) c, Ys[i]));
296 } 300 }
297 301
298 for (int i = 0; i < n; ++i) 302 for (int i = 0; i < n; ++i)
299 for (int j = i + 1; j < n; ++j) 303 for (int j = i + 1; j < n; ++j)
300 bodyAtoms.add(Atom.create(Inequality.INSTANCE, Ys[i], Ys[j])); 304 bodyAtoms.add(Atom.create(Inequality.INSTANCE, Ys[i], Ys[j]));
301
302 return DLClause.create(new Atom[] {Atom.create(a, X)}, bodyAtoms.toArray(new Atom[0]));
303 }
304
305 private Map<AtLeastConcept, AtomicConcept> leftAuxiliaryConcept = new HashMap<AtLeastConcept, AtomicConcept>();
306 private Map<AtomicConcept, AtLeastConcept> leftAuxiliaryConcept_inv = new HashMap<AtomicConcept, AtLeastConcept>();
307
308 public static final String auxiliaryConceptPrefix = Namespace.PAGODA_AUX + "concept_";
309 305
310 private static String getName(String iri) { 306 return DLClause.create(new Atom[]{Atom.create(a, X)}, bodyAtoms.toArray(new Atom[0]));
311 int index = iri.lastIndexOf("#");
312 if (index != -1) return iri.substring(index + 1);
313 index = iri.lastIndexOf("/");
314 if (index != -1) return iri.substring(index + 1);
315 return iri;
316 } 307 }
317 308
318 private AtomicConcept getRightAuxiliaryConcept(AtLeastConcept alc, Individual... individuals) { 309 private AtomicConcept getRightAuxiliaryConcept(AtLeastConcept alc, Individual... individuals) {
319 String iri = getAuxiliaryConcept4Disjunct(alc, individuals); 310 String iri = getAuxiliaryConcept4Disjunct(alc, individuals);
320 rightAuxiliaryConcept.put(iri, alc); 311 rightAuxiliaryConcept.put(iri, alc);
321 return AtomicConcept.create(iri); 312 return AtomicConcept.create(iri);
322 }
323
324 public static String getAuxiliaryConcept4Disjunct(AtLeastConcept alc, Individual... individuals) {
325 Role r = alc.getOnRole();
326 LiteralConcept c = alc.getToConcept();
327 StringBuilder builder = new StringBuilder(auxiliaryConceptPrefix);
328 if (r instanceof AtomicRole)
329 builder.append(getName(((AtomicRole) r).getIRI()));
330 else
331 builder.append(getName(((InverseRole) r).getInverseOf().getIRI())).append("_inv");
332
333 if (alc.getNumber() > 1)
334 builder.append("_").append(alc.getNumber());
335
336 if (c instanceof AtomicConcept) {
337 if (!c.equals(AtomicConcept.THING))
338 builder.append("_").append(getName(((AtomicConcept) c).getIRI()));
339 }
340 else
341 builder.append("_").append(getName((OverApproxExist.getNegationConcept(((AtomicNegationConcept) c).getNegatedAtomicConcept()).getIRI())));
342
343 if (individuals.length > 1)
344 builder.append("_").append(getName(individuals[0].getIRI()));
345
346 builder.append("_exist");
347
348 return builder.toString();
349 } 313 }
350 314
351 public AtomicConcept getLeftAuxiliaryConcept(AtLeastConcept key, boolean available) { 315 public AtomicConcept getLeftAuxiliaryConcept(AtLeastConcept key, boolean available) {
352// AtLeastConcept key = AtLeastConcept.create(1, r, c); 316// AtLeastConcept key = AtLeastConcept.create(1, r, c);
353 AtomicConcept value = null; 317 AtomicConcept value = null;
354 if ((value = leftAuxiliaryConcept.get(key)) != null); 318 if ((value = leftAuxiliaryConcept.get(key)) != null) ;
355 else if (!available) { 319 else if (!available) {
356 value = AtomicConcept.create(getAuxiliaryConcept4Disjunct(key)); 320 value = AtomicConcept.create(getAuxiliaryConcept4Disjunct(key));
357 leftAuxiliaryConcept.put(key, value); 321 leftAuxiliaryConcept.put(key, value);
358 leftAuxiliaryConcept_inv.put(value, key); 322 leftAuxiliaryConcept_inv.put(value, key);
359 } 323 }
360 return value; 324 return value;
361 } 325 }
362 326
363 public AtLeastConcept getLeftAtLeastConcept(AtomicConcept value) { 327 public AtLeastConcept getLeftAtLeastConcept(AtomicConcept value) {
364 return leftAuxiliaryConcept_inv.get(value); 328 return leftAuxiliaryConcept_inv.get(value);
365 } 329 }
366
367 Map<String, AtLeastConcept> rightAuxiliaryConcept = new HashMap<String, AtLeastConcept>();
368 330
369 public AtLeastConcept getRightAtLeastConcept(AtomicConcept p) { 331 public AtLeastConcept getRightAtLeastConcept(AtomicConcept p) {
370 return rightAuxiliaryConcept.get(p.getIRI()); 332 return rightAuxiliaryConcept.get(p.getIRI());
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication.java b/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication.java
index f8e6524..b16e645 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication.java
@@ -1,22 +1,14 @@
1package uk.ac.ox.cs.pagoda.multistage; 1package uk.ac.ox.cs.pagoda.multistage;
2 2
3import java.util.Collection; 3import org.semanticweb.HermiT.model.*;
4import java.util.HashSet;
5import java.util.LinkedList;
6
7import org.semanticweb.HermiT.model.AtLeastConcept;
8import org.semanticweb.HermiT.model.Atom;
9import org.semanticweb.HermiT.model.AtomicConcept;
10import org.semanticweb.HermiT.model.AtomicNegationConcept;
11import org.semanticweb.HermiT.model.AtomicRole;
12import org.semanticweb.HermiT.model.DLClause;
13import org.semanticweb.HermiT.model.DLPredicate;
14import org.semanticweb.HermiT.model.InverseRole;
15
16import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; 4import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
17import uk.ac.ox.cs.pagoda.rules.Program; 5import uk.ac.ox.cs.pagoda.rules.Program;
18import uk.ac.ox.cs.pagoda.util.Timer; 6import uk.ac.ox.cs.pagoda.util.Timer;
19 7
8import java.util.Collection;
9import java.util.HashSet;
10import java.util.LinkedList;
11
20public class RestrictedApplication extends MultiStageUpperProgram { 12public class RestrictedApplication extends MultiStageUpperProgram {
21 13
22 Normalisation norm; 14 Normalisation norm;
@@ -30,58 +22,48 @@ public class RestrictedApplication extends MultiStageUpperProgram {
30 clauses.addAll(constraints); 22 clauses.addAll(constraints);
31 } 23 }
32 24
33 private void addNegativeDatalogRules() { 25 // It should be shifting
34 Collection<DLClause> allRules = new LinkedList<DLClause>(rules); 26 public static Collection<DLClause> addAdditionalDatalogRules(DLClause clause, BottomStrategy bottom, Normalisation norm) {
35 allRules.addAll(constraints); 27 LinkedList<DLClause> newClauses = new LinkedList<DLClause>();
36 for (DLClause clause: allRules) { 28 Atom[] headAtoms = clause.getHeadAtoms();
37 for (DLClause newClause: addAddtionalDatalogRules(clause, m_bottom, norm)) 29 Atom[] bodyAtoms = clause.getBodyAtoms();
38 addDatalogRule(newClause); 30 int headLength = headAtoms.length;
39 }
40 allRules.clear();
41 }
42
43 public static Collection<DLClause> addAddtionalDatalogRules(DLClause clause, BottomStrategy bottom, Normalisation norm) {
44 LinkedList<DLClause> newClauses = new LinkedList<DLClause>();
45 Atom[] headAtoms = clause.getHeadAtoms();
46 Atom[] bodyAtoms = clause.getBodyAtoms();
47 int headLength = headAtoms.length;
48 int bodyLength = bodyAtoms.length; 31 int bodyLength = bodyAtoms.length;
49 DLClause tClause; 32 DLClause tClause;
50 if (bottom.isBottomRule(clause)) { 33 if (bottom.isBottomRule(clause)) {
51 if (clause.getBodyLength() == 1) return newClauses; 34 if (clause.getBodyLength() == 1) return newClauses;
52 for (int i = 0; i < bodyLength; ++i) { 35 for (int i = 0; i < bodyLength; ++i) {
53 if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) { 36 if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) {
54 AtomicConcept ac = (AtomicConcept) bodyAtoms[i].getDLPredicate(); 37 AtomicConcept ac = (AtomicConcept) bodyAtoms[i].getDLPredicate();
55 if (!ac.getIRI().endsWith("_exist")) { 38 if (!ac.getIRI().endsWith("_exist")) {
56 Atom[] newBodyAtoms = new Atom[bodyLength - 1]; 39 Atom[] newBodyAtoms = new Atom[bodyLength - 1];
57 for (int j = 0; j < bodyLength - 1; ++j) 40 for (int j = 0; j < bodyLength - 1; ++j)
58 newBodyAtoms[j] = j < i ? bodyAtoms[j] : bodyAtoms[j + 1]; 41 newBodyAtoms[j] = j < i ? bodyAtoms[j] : bodyAtoms[j + 1];
59 42
60 Atom negativeAtom = getNegativeAtom(bodyAtoms[i]); 43 Atom negativeAtom = getNegativeAtom(bodyAtoms[i]);
61 tClause = DLClause.create(new Atom[] { negativeAtom }, newBodyAtoms); 44 tClause = DLClause.create(new Atom[] { negativeAtom }, newBodyAtoms);
62 // addDatalogRule(tClause); 45 // addDatalogRule(tClause);
63 newClauses.add(tClause); 46 newClauses.add(tClause);
64 } 47 } else {
65 else {
66 Atom[] newBodyAtoms = new Atom[bodyLength]; 48 Atom[] newBodyAtoms = new Atom[bodyLength];
67 Atom negativeAtom = null; 49 Atom negativeAtom = null;
68 org.semanticweb.HermiT.model.Variable E = org.semanticweb.HermiT.model.Variable.create("E"); 50 org.semanticweb.HermiT.model.Variable E = org.semanticweb.HermiT.model.Variable.create("E");
69 AtLeastConcept alc = norm.getLeftAtLeastConcept(ac); 51 AtLeastConcept alc = norm.getLeftAtLeastConcept(ac);
70 52
71 if (alc.getOnRole() instanceof AtomicRole) 53 if (alc.getOnRole() instanceof AtomicRole)
72 newBodyAtoms[i] = Atom.create((AtomicRole) alc.getOnRole(), bodyAtoms[i].getArgument(0), E); 54 newBodyAtoms[i] = Atom.create((AtomicRole) alc.getOnRole(), bodyAtoms[i].getArgument(0), E);
73 else 55 else
74 newBodyAtoms[i] = Atom.create(((InverseRole) alc.getOnRole()).getInverseOf(), E, bodyAtoms[i].getArgument(0)); 56 newBodyAtoms[i] = Atom.create(((InverseRole) alc.getOnRole()).getInverseOf(), E, bodyAtoms[i].getArgument(0));
75 if (alc.getToConcept() instanceof AtomicConcept) 57 if (alc.getToConcept() instanceof AtomicConcept)
76 negativeAtom = getNegativeAtom(Atom.create((AtomicConcept) alc.getToConcept(), E)); 58 negativeAtom = getNegativeAtom(Atom.create((AtomicConcept) alc.getToConcept(), E));
77 else 59 else
78 negativeAtom = getNegativeAtom(Atom.create(((AtomicNegationConcept) alc.getToConcept()).getNegatedAtomicConcept(), E)); 60 negativeAtom = getNegativeAtom(Atom.create(((AtomicNegationConcept) alc.getToConcept()).getNegatedAtomicConcept(), E));
79 61
80 for (int j = 0; j < bodyLength; ++j) 62 for (int j = 0; j < bodyLength; ++j)
81 if (i != j) 63 if (i != j)
82 newBodyAtoms[j] = bodyAtoms[j]; 64 newBodyAtoms[j] = bodyAtoms[j];
65
83 66
84
85 tClause = DLClause.create(new Atom[] { negativeAtom }, newBodyAtoms); 67 tClause = DLClause.create(new Atom[] { negativeAtom }, newBodyAtoms);
86 // addDatalogRule(tClause); 68 // addDatalogRule(tClause);
87 newClauses.add(tClause); 69 newClauses.add(tClause);
@@ -91,55 +73,65 @@ public class RestrictedApplication extends MultiStageUpperProgram {
91 } 73 }
92 else if (headLength > 1) { 74 else if (headLength > 1) {
93 for (int i = 0; i < headLength; ++i) { 75 for (int i = 0; i < headLength; ++i) {
94 DLPredicate p = headAtoms[i].getDLPredicate(); 76 DLPredicate p = headAtoms[i].getDLPredicate();
95 if (!(p instanceof AtomicConcept)) { 77 if (!(p instanceof AtomicConcept)) {
96 return newClauses; 78 return newClauses;
97 } 79 }
98 } 80 }
99 81
100 for (int i = 0; i < headLength; ++i) { 82 for (int i = 0; i < headLength; ++i) {
101 Atom[] newBodyAtoms = new Atom[headLength + bodyLength - 1]; 83 Atom[] newBodyAtoms = new Atom[headLength + bodyLength - 1];
102 for (int j = 0; j < headLength + bodyLength - 1; ++j) 84 for (int j = 0; j < headLength + bodyLength - 1; ++j)
103 newBodyAtoms[j] = j < bodyLength ? bodyAtoms[j] : 85 newBodyAtoms[j] = j < bodyLength ? bodyAtoms[j] :
104 j < bodyLength + i ? getNegativeAtom(headAtoms[j - bodyLength]) : 86 j < bodyLength + i ? getNegativeAtom(headAtoms[j - bodyLength]) :
105 getNegativeAtom(headAtoms[j - bodyLength + 1]); 87 getNegativeAtom(headAtoms[j - bodyLength + 1]);
106 88
107 tClause = DLClause.create(new Atom[] { headAtoms[i] }, newBodyAtoms); 89 tClause = DLClause.create(new Atom[]{headAtoms[i]}, newBodyAtoms);
108// addDatalogRule(tClause); 90// addDatalogRule(tClause);
109 newClauses.add(tClause); 91 newClauses.add(tClause);
110 } 92 }
111 } 93 }
112 else if (headLength == 1) { 94 else if (headLength == 1) {
113 DLPredicate p = clause.getHeadAtom(0).getDLPredicate(); 95 DLPredicate p = clause.getHeadAtom(0).getDLPredicate();
114 if (p instanceof AtomicConcept) { 96 if (p instanceof AtomicConcept) {
115 Atom negativeHeadAtom = getNegativeAtom(clause.getHeadAtom(0)); 97 Atom negativeHeadAtom = getNegativeAtom(clause.getHeadAtom(0));
116 for (int i = 0; i < bodyLength; ++i) 98 for (int i = 0; i < bodyLength; ++i)
117 if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) { 99 if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) {
118 Atom[] newBodyAtoms = new Atom[clause.getBodyLength()]; 100 Atom[] newBodyAtoms = new Atom[clause.getBodyLength()];
119 newBodyAtoms[0] = negativeHeadAtom; 101 newBodyAtoms[0] = negativeHeadAtom;
120 for (int j = 1; j < bodyLength; ++j) 102 for (int j = 1; j < bodyLength; ++j)
121 newBodyAtoms[j] = j <= i ? bodyAtoms[j - 1] : bodyAtoms[j]; 103 newBodyAtoms[j] = j <= i ? bodyAtoms[j - 1] : bodyAtoms[j];
122 104
123 tClause = DLClause.create(new Atom[] {getNegativeAtom(bodyAtoms[i])}, newBodyAtoms); 105 tClause = DLClause.create(new Atom[]{getNegativeAtom(bodyAtoms[i])}, newBodyAtoms);
124// addDatalogRule(tClause); 106// addDatalogRule(tClause);
125 newClauses.add(tClause); 107 newClauses.add(tClause);
126 } 108 }
127 } 109 }
128 else if (p instanceof AtLeastConcept && clause.getBodyLength() == 1 && clause.getBodyAtom(0).getDLPredicate() instanceof AtomicConcept) { 110 else if (p instanceof AtLeastConcept && clause.getBodyLength() == 1 && clause.getBodyAtom(0).getDLPredicate() instanceof AtomicConcept) {
129 AtLeastConcept alc = (AtLeastConcept) p; 111 AtLeastConcept alc = (AtLeastConcept) p;
130 AtomicConcept ac = norm.getLeftAuxiliaryConcept(alc, true); 112 AtomicConcept ac = norm.getLeftAuxiliaryConcept(alc, true);
131 if (ac != null) { 113 if (ac != null) {
132 Atom bodyAtom = clause.getBodyAtom(0); 114 Atom bodyAtom = clause.getBodyAtom(0);
133// addDatalogRule(DLClause.create(new Atom[] {getNegativeAtom(bodyAtom)}, 115// addDatalogRule(DLClause.create(new Atom[] {getNegativeAtom(bodyAtom)},
134// new Atom[] {getNegativeAtom(Atom.create(ac, bodyAtom.getArgument(0)))} )); 116// new Atom[] {getNegativeAtom(Atom.create(ac, bodyAtom.getArgument(0)))} ));
135 newClauses.add(DLClause.create(new Atom[] {getNegativeAtom(bodyAtom)}, 117 newClauses.add(DLClause.create(new Atom[]{getNegativeAtom(bodyAtom)},
136 new Atom[] {getNegativeAtom(Atom.create(ac, bodyAtom.getArgument(0)))} )); 118 new Atom[]{getNegativeAtom(Atom.create(ac, bodyAtom.getArgument(0)))}));
137 } 119 }
138 } 120 }
139 } 121 }
140 return newClauses; 122 return newClauses;
141 } 123 }
142 124
125 private void addNegativeDatalogRules() {
126 Collection<DLClause> allRules = new LinkedList<DLClause>(rules);
127 allRules.addAll(constraints);
128 for (DLClause clause : allRules) {
129 for (DLClause newClause : addAdditionalDatalogRules(clause, m_bottom, norm))
130 addDatalogRule(newClause);
131 }
132 allRules.clear();
133 }
134
143 public Normalisation getNormalisation() { 135 public Normalisation getNormalisation() {
144 return norm; 136 return norm;
145 } 137 }
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication2.java b/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication2.java
deleted file mode 100644
index 66e8a17..0000000
--- a/src/uk/ac/ox/cs/pagoda/multistage/RestrictedApplication2.java
+++ /dev/null
@@ -1,177 +0,0 @@
1package uk.ac.ox.cs.pagoda.multistage;
2
3import java.util.Arrays;
4import java.util.Collection;
5import java.util.Comparator;
6import java.util.LinkedList;
7
8import org.semanticweb.HermiT.model.AtLeastConcept;
9import org.semanticweb.HermiT.model.Atom;
10import org.semanticweb.HermiT.model.AtomicConcept;
11import org.semanticweb.HermiT.model.DLClause;
12import org.semanticweb.HermiT.model.DLPredicate;
13
14import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
15import uk.ac.ox.cs.pagoda.multistage.treatement.SimpleComparator;
16import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
17import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
18import uk.ac.ox.cs.pagoda.rules.Program;
19
20public class RestrictedApplication2 extends TwoStageApplication {
21
22 private Normalisation norm;
23 private boolean hasDisjunctions;
24 private Comparator<Atom> disjunctComparator;
25
26 public RestrictedApplication2(TwoStageQueryEngine engine, DatalogProgram program, GapByStore4ID gap) {
27 super(engine, program, gap);
28 if (hasDisjunctions) {
29 addNegativeDatalogRules();
30 disjunctComparator = new SimpleComparator();
31 }
32 }
33
34 private void addNegativeDatalogRules() {
35 Collection<DLClause> allRules = new LinkedList<DLClause>(rules);
36 allRules.addAll(constraints);
37 for (DLClause clause: allRules) {
38 addAddtionalDatalogRules(clause);
39 }
40 allRules.clear();
41 }
42
43 private void addAddtionalDatalogRules(DLClause clause) {
44 Atom[] headAtoms = clause.getHeadAtoms();
45 Atom[] bodyAtoms = clause.getBodyAtoms();
46 int headLength = headAtoms.length;
47 int bodyLength = bodyAtoms.length;
48 DLClause tClause;
49 if (m_bottom.isBottomRule(clause)) {
50 if (clause.getBodyLength() == 1) return ;
51 for (int i = 0; i < bodyLength; ++i)
52 if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) {
53 Atom[] newBodyAtoms = new Atom[bodyLength - 1];
54 for (int j = 0; j < bodyLength - 1; ++j)
55 newBodyAtoms[j] = j < i ? bodyAtoms[j] : bodyAtoms[j + 1];
56
57 Atom negativeAtom = MultiStageUpperProgram.getNegativeAtom(bodyAtoms[i]);
58 tClause = DLClause.create(new Atom[] { negativeAtom }, newBodyAtoms);
59 addDatalogRule(tClause);
60 }
61 }
62 else if (headLength > 1) {
63 for (int i = 0; i < headLength; ++i) {
64 DLPredicate p = headAtoms[i].getDLPredicate();
65 if (!(p instanceof AtomicConcept)) {
66 return ;
67 }
68 }
69
70 for (int i = 0; i < headLength; ++i) {
71 Atom[] newBodyAtoms = new Atom[headLength + bodyLength - 1];
72 for (int j = 0; j < headLength + bodyLength - 1; ++j)
73 newBodyAtoms[j] = j < bodyLength ? bodyAtoms[j] :
74 j < bodyLength + i ? MultiStageUpperProgram.getNegativeAtom(headAtoms[j - bodyLength]) :
75 MultiStageUpperProgram.getNegativeAtom(headAtoms[j - bodyLength + 1]);
76
77 tClause = DLClause.create(new Atom[] { headAtoms[i] }, newBodyAtoms);
78 addDatalogRule(tClause);
79 }
80 }
81 else if (headLength == 1) {
82 DLPredicate p = clause.getHeadAtom(0).getDLPredicate();
83 if (p instanceof AtomicConcept) {
84 Atom negativeHeadAtom = MultiStageUpperProgram.getNegativeAtom(clause.getHeadAtom(0));
85 for (int i = 0; i < bodyLength; ++i)
86 if (bodyAtoms[i].getDLPredicate() instanceof AtomicConcept) {
87 Atom[] newBodyAtoms = new Atom[clause.getBodyLength()];
88 newBodyAtoms[0] = negativeHeadAtom;
89 for (int j = 1; j < bodyLength; ++j)
90 newBodyAtoms[j] = j <= i ? bodyAtoms[j - 1] : bodyAtoms[j];
91
92 tClause = DLClause.create(new Atom[] {MultiStageUpperProgram.getNegativeAtom(bodyAtoms[i])}, newBodyAtoms);
93 addDatalogRule(tClause);
94 }
95 }
96 else if (p instanceof AtLeastConcept && clause.getBodyLength() == 1 && clause.getBodyAtom(0).getDLPredicate() instanceof AtomicConcept) {
97 AtLeastConcept alc = (AtLeastConcept) p;
98 AtomicConcept ac = norm.getLeftAuxiliaryConcept(alc, true);
99 if (ac != null) {
100 Atom bodyAtom = clause.getBodyAtom(0);
101 addDatalogRule(DLClause.create(new Atom[] {MultiStageUpperProgram.getNegativeAtom(bodyAtom)},
102 new Atom[] {MultiStageUpperProgram.getNegativeAtom(Atom.create(ac, bodyAtom.getArgument(0)))} ));
103 }
104 }
105 }
106 }
107
108 @Override
109 protected void addAuxiliaryRules() {
110 for (DLClause constraint: constraints)
111 if (constraint.getHeadLength() <= 1)
112 processExistentialRule(constraint);
113 else
114 processDisjunctiveRule(constraint);
115 }
116
117 private static final Atom[] empty = new Atom[0];
118
119 private void processDisjunctiveRule(DLClause constraint) {
120 int headLength = constraint.getHeadLength();
121 Atom[] orderedAtoms = new Atom[headLength];
122 for (int i = 0; i < headLength; ++i)
123 orderedAtoms[i] = constraint.getHeadAtom(i);
124
125 Arrays.sort(orderedAtoms, disjunctComparator);
126
127 Collection<Atom> bodyAtoms = new LinkedList<Atom>();
128 for (int i = 0; i < headLength; ++i) {
129 bodyAtoms.add(getNAFAtom(orderedAtoms[i]));
130 }
131 for (Atom atom: constraint.getBodyAtoms())
132 bodyAtoms.add(atom);
133
134 Atom negAtom;
135 for (Atom atom: constraint.getHeadAtoms()) {
136 negAtom = MultiStageUpperProgram.getNegativeAtom(atom);
137 bodyAtoms.add(getNAFAtom(negAtom));
138 addDatalogRule(DLClause.create(new Atom[] {atom}, bodyAtoms.toArray(empty)));
139 }
140 }
141
142 private void processExistentialRule(DLClause constraint) {
143 Atom[] bodyAtoms = new Atom[constraint.getBodyLength() + 1];
144 bodyAtoms[0] = getNAFAtom(constraint.getHeadAtom(0));
145 int i = 0;
146 for (Atom atom: constraint.getBodyAtoms())
147 bodyAtoms[++i] = atom;
148
149 Collection<DLClause> overClauses = overExist.convert(constraint, getOriginalClause(constraint));
150 for (DLClause clause: overClauses)
151 if (DLClauseHelper.hasSubsetBodyAtoms(clause, constraint))
152 addDatalogRule(DLClause.create(new Atom[] {clause.getHeadAtom(0)}, bodyAtoms));
153 }
154
155 @Override
156 protected Collection<DLClause> getInitialClauses(Program program) {
157 Collection<DLClause> clauses = program.getClauses();
158 hasDisjunctions = false;
159 for (DLClause clause: clauses)
160 if (clause.getHeadLength() > 1) {
161 hasDisjunctions = true;
162 break;
163 }
164
165 if (hasDisjunctions) {
166 norm = new Normalisation(clauses, program.getOntology(), m_bottom);
167 norm.process();
168 clauses = norm.m_normClauses;
169 }
170 return clauses;
171 }
172
173 protected DLClause getOriginalClause(DLClause clause) {
174 DLClause original = super.getOriginalClause(clause);
175 return norm.getOriginalClause(original);
176 }
177}
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/StageQueryEngine.java b/src/uk/ac/ox/cs/pagoda/multistage/StageQueryEngine.java
index 5d2e0d1..e652f66 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/StageQueryEngine.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/StageQueryEngine.java
@@ -8,23 +8,24 @@ import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
8import uk.ac.ox.cs.pagoda.util.Utility; 8import uk.ac.ox.cs.pagoda.util.Utility;
9 9
10public abstract class StageQueryEngine extends BasicQueryEngine { 10public abstract class StageQueryEngine extends BasicQueryEngine {
11 11
12 protected boolean checkValidity; 12 protected boolean checkValidity;
13 13 Boolean validMaterialisation = null;
14
14 public StageQueryEngine(String name, boolean checkValidity) { 15 public StageQueryEngine(String name, boolean checkValidity) {
15 super(name); 16 super(name);
16 this.checkValidity = checkValidity; 17 this.checkValidity = checkValidity;
17 } 18 }
18 19
19 public abstract void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap); 20 public abstract void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap);
20 21
21 public abstract int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap); 22 public abstract int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap);
22 23
24 public abstract int materialiseSkolemly(DatalogProgram dProgram, GapByStore4ID gap);
25
23 public void dispose() { 26 public void dispose() {
24 super.dispose(); 27 super.dispose();
25 } 28 }
26
27 Boolean validMaterialisation = null;
28 29
29 public boolean isValid() { 30 public boolean isValid() {
30 if (!checkValidity) return true; 31 if (!checkValidity) return true;
@@ -43,9 +44,9 @@ public abstract class StageQueryEngine extends BasicQueryEngine {
43 } 44 }
44 45
45 if (validMaterialisation) 46 if (validMaterialisation)
46 Utility.logInfo("The lazy-upper-bound store is valid."); 47 Utility.logInfo("The " + name + " store is valid.");
47 else 48 else
48 Utility.logInfo("The lazy-upper-bound store is not valid."); 49 Utility.logInfo("The " + name + " store is not valid.");
49 return validMaterialisation; 50 return validMaterialisation;
50 } 51 }
51 52
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/TwoStageApplication.java b/src/uk/ac/ox/cs/pagoda/multistage/TwoStageApplication.java
deleted file mode 100644
index f08bfbd..0000000
--- a/src/uk/ac/ox/cs/pagoda/multistage/TwoStageApplication.java
+++ /dev/null
@@ -1,265 +0,0 @@
1package uk.ac.ox.cs.pagoda.multistage;
2
3import java.util.Collection;
4import java.util.HashMap;
5import java.util.HashSet;
6import java.util.LinkedList;
7import java.util.Map;
8import java.util.Set;
9
10import org.semanticweb.HermiT.model.AnnotatedEquality;
11import org.semanticweb.HermiT.model.AtLeast;
12import org.semanticweb.HermiT.model.AtLeastConcept;
13import org.semanticweb.HermiT.model.AtLeastDataRange;
14import org.semanticweb.HermiT.model.Atom;
15import org.semanticweb.HermiT.model.AtomicConcept;
16import org.semanticweb.HermiT.model.AtomicNegationConcept;
17import org.semanticweb.HermiT.model.AtomicRole;
18import org.semanticweb.HermiT.model.DLClause;
19import org.semanticweb.HermiT.model.DLPredicate;
20import org.semanticweb.HermiT.model.Equality;
21import org.semanticweb.HermiT.model.Inequality;
22import org.semanticweb.HermiT.model.Variable;
23
24import uk.ac.ox.cs.JRDFox.JRDFStoreException;
25import uk.ac.ox.cs.JRDFox.store.TupleIterator;
26import uk.ac.ox.cs.pagoda.MyPrefixes;
27import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
28import uk.ac.ox.cs.pagoda.hermit.RuleHelper;
29import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
30import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager;
31import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
32import uk.ac.ox.cs.pagoda.rules.OverApproxExist;
33import uk.ac.ox.cs.pagoda.rules.Program;
34import uk.ac.ox.cs.pagoda.util.Namespace;
35import uk.ac.ox.cs.pagoda.util.SparqlHelper;
36import uk.ac.ox.cs.pagoda.util.Utility;
37
38abstract class TwoStageApplication {
39
40 protected TwoStageQueryEngine engine;
41 protected MyPrefixes prefixes = MyPrefixes.PAGOdAPrefixes;
42 private GapByStore4ID gap;
43
44 Program lowerProgram;
45
46 boolean m_incrementally = true;
47
48 protected Set<DLClause> rules = new HashSet<DLClause>();
49 private StringBuilder datalogRuleText = new StringBuilder();
50
51 protected Collection<DLClause> constraints = new LinkedList<DLClause>();
52 protected BottomStrategy m_bottom;
53
54 protected Set<Atom> toGenerateNAFFacts = new HashSet<Atom>();
55
56 protected OverApproxExist overExist = new OverApproxExist();
57
58 private Map<DLClause, DLClause> map = new HashMap<DLClause, DLClause>();
59
60 public TwoStageApplication(TwoStageQueryEngine engine, DatalogProgram program, GapByStore4ID gap) {
61 this.engine = engine;
62 tripleManager = new RDFoxTripleManager(engine.getDataStore(), m_incrementally);
63 this.gap = gap;
64 m_bottom = program.getUpperBottomStrategy();
65 lowerProgram = program.getLower();
66
67 Variable X = Variable.create("X");
68 Collection<DLClause> clauses = getInitialClauses(program.getGeneral());
69 Collection<DLClause> introducedConstraints = new LinkedList<DLClause>();
70 LinkedList<Atom> newHeadAtoms = new LinkedList<Atom>();
71 for (DLClause clause : m_bottom.process(clauses)) {
72 if (m_bottom.isBottomRule(clause)
73 || clause.getHeadLength() == 1
74 && !(clause.getHeadAtom(0).getDLPredicate() instanceof AtLeast))
75 addDatalogRule(clause);
76 else {
77 newHeadAtoms.clear();
78 boolean changed = false;
79 for (Atom atom : clause.getHeadAtoms()) {
80 if (atom.getDLPredicate() instanceof AtLeastConcept) {
81 AtLeastConcept atLeast = (AtLeastConcept) atom
82 .getDLPredicate();
83 if (atLeast.getToConcept() instanceof AtomicNegationConcept) {
84 AtomicConcept positive = ((AtomicNegationConcept) atLeast
85 .getToConcept()).getNegatedAtomicConcept();
86 AtomicConcept negative = OverApproxExist
87 .getNegationConcept(positive);
88 Atom atom1 = Atom.create(positive, X);
89 Atom atom2 = Atom.create(negative, X);
90 introducedConstraints.add(DLClause.create(
91 new Atom[0], new Atom[] { atom1, atom2 }));
92 newHeadAtoms.add(Atom.create(AtLeastConcept.create(
93 atLeast.getArity(), atLeast.getOnRole(),
94 negative), atom.getArgument(0)));
95 changed = true;
96 continue;
97 }
98 } else if (atom.getDLPredicate() instanceof AtLeastDataRange)
99 changed = true;
100 else
101 newHeadAtoms.add(atom);
102
103 }
104 if (!changed)
105 constraints.add(clause);
106 else if (!newHeadAtoms.isEmpty()) {
107 DLClause newClause = DLClause.create(
108 newHeadAtoms.toArray(new Atom[0]),
109 clause.getBodyAtoms());
110 map.put(newClause, clause);
111 constraints.add(newClause);
112 }
113 }
114 }
115
116 for (DLClause clause : m_bottom.process(introducedConstraints))
117 addDatalogRule(clause);
118
119 }
120
121 int materialise() {
122 StringBuilder builder = new StringBuilder(getDatalogRuleText());
123 for (DLClause clause: lowerProgram.getClauses())
124 if (!rules.contains(clause))
125 builder.append(RuleHelper.getText(clause));
126
127 engine.materialise(builder.toString(), null, false);
128 addAuxiliaryRules();
129 addAuxiliaryNAFFacts();
130 engine.materialise(getDatalogRuleText(), gap, m_incrementally);
131 return engine.isValid() ? 1 : 0;
132 }
133
134 void checkNAFFacts() {
135 int counter = 0;
136 TupleIterator tuples = null;
137 for (Atom atom : toGenerateNAFFacts) {
138 try {
139 counter = 0;
140 atom = getNAFAtom(atom);
141 tuples = engine.internal_evaluate(SparqlHelper.getSPARQLQuery(
142 new Atom[] { atom }, atom.getArgumentVariable(0)
143 .getName()));
144 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext()) {
145 ++counter;
146 }
147 Utility.logDebug(atom + " " + counter);
148 } catch (JRDFStoreException e) {
149 // TODO Auto-generated catch block
150 e.printStackTrace();
151 } finally {
152 if (tuples != null) tuples.dispose();
153 tuples = null;
154 }
155 }
156 }
157
158 protected void addDatalogRule(DLClause clause) {
159 if (clause.getBodyAtom(0).equals(clause.getHeadAtom(0)))
160 return;
161 rules.add(clause);
162 datalogRuleText.append(RuleHelper.getText(clause)).append('\n');
163 }
164
165 public String getDatalogRuleText() {
166 StringBuilder program = new StringBuilder();
167 program.append(prefixes.prefixesText());
168 program.append(datalogRuleText.toString());
169 return program.toString();
170 }
171
172 protected abstract void addAuxiliaryRules();
173
174 Set<Integer> allIndividuals = new HashSet<Integer>();
175 RDFoxTripleManager tripleManager;
176
177 private void addAuxiliaryNAFFacts() {
178
179 for (int id : tripleManager.getResourceIDs(engine.getAllIndividuals()))
180 allIndividuals.add(id);
181
182 DLPredicate naf;
183 DLPredicate p;
184 for (Atom atom: toGenerateNAFFacts) {
185 naf = getNAFAtom(atom, false).getDLPredicate();
186 p = atom.getDLPredicate();
187
188 int typeID = tripleManager.getResourceID(Namespace.RDF_TYPE);
189 int conceptID = tripleManager.getResourceID(((AtomicConcept) naf)
190 .getIRI());
191 for (int answer : generateNAFFacts(p)) {
192 tripleManager.addTripleByID(new int[] { answer, typeID,
193 conceptID });
194 }
195 }
196 }
197
198 private Collection<Integer> generateNAFFacts(DLPredicate p) {
199 Variable X = Variable.create("X");
200 TupleIterator tuples = null;
201 Set<Integer> ret = new HashSet<Integer>(allIndividuals);
202 try {
203 tuples = engine.internal_evaluate(SparqlHelper.getSPARQLQuery(
204 new Atom[] { Atom.create(p, X) }, "X"));
205 for (long multi = tuples.open(); multi != 0; multi = tuples.getNext()) {
206 ret.remove((int) tuples.getResourceID(0));
207 }
208 } catch (JRDFStoreException e) {
209 // TODO Auto-generated catch block
210 e.printStackTrace();
211 } finally {
212 if (tuples != null) tuples.dispose();
213 }
214 return ret;
215 }
216
217 protected abstract Collection<DLClause> getInitialClauses(Program program);
218
219 private static final String NAF_suffix = "_NAF";
220
221 protected Atom getNAFAtom(Atom atom) {
222 return getNAFAtom(atom, true);
223 }
224
225 private Atom getNAFAtom(Atom atom, boolean update) {
226 DLPredicate p = atom.getDLPredicate();
227 if (update) {
228 toGenerateNAFFacts.add(atom);
229 }
230 if (p instanceof AtomicConcept) {
231 AtomicConcept nc = AtomicConcept.create(((AtomicConcept) p)
232 .getIRI() + "_NAF");
233 return Atom.create(nc, atom.getArgument(0));
234 }
235 if (p instanceof Equality || p instanceof AnnotatedEquality)
236 return Atom.create(
237 AtomicRole.create(Namespace.EQUALITY + NAF_suffix),
238 atom.getArgument(0), atom.getArgument(1));
239 if (p instanceof Inequality)
240 atom = Atom.create(
241 AtomicRole.create(Namespace.INEQUALITY + NAF_suffix),
242 atom.getArgument(0), atom.getArgument(1));
243 // if (p instanceof AtomicRole) {
244 // AtomicRole nr = AtomicRole.create(((AtomicRole) p).getIRI() +
245 // NAF_suffix);
246 // return Atom.create(nr, atom.getArgument(0), atom.getArgument(1));
247 // }
248 if (p instanceof AtLeastConcept) {
249 AtomicConcept nc = AtomicConcept.create(Normalisation
250 .getAuxiliaryConcept4Disjunct((AtLeastConcept) p)
251 + NAF_suffix);
252 return Atom.create(nc, atom.getArgument(0));
253 }
254 Utility.logError("Unknown DLPredicate in an atom: " + atom);
255 return null;
256 }
257
258 protected DLClause getOriginalClause(DLClause clause) {
259 DLClause original = map.get(clause);
260 if (original == null)
261 return clause;
262 return original;
263 }
264
265} \ No newline at end of file
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/TwoStageQueryEngine.java b/src/uk/ac/ox/cs/pagoda/multistage/TwoStageQueryEngine.java
deleted file mode 100644
index b7f989f..0000000
--- a/src/uk/ac/ox/cs/pagoda/multistage/TwoStageQueryEngine.java
+++ /dev/null
@@ -1,103 +0,0 @@
1package uk.ac.ox.cs.pagoda.multistage;
2
3import java.io.FileInputStream;
4import java.io.FileNotFoundException;
5import java.io.IOException;
6import java.util.Collection;
7
8import org.openrdf.rio.RDFHandlerException;
9import org.openrdf.rio.RDFParseException;
10import org.openrdf.rio.turtle.TurtleParser;
11
12import uk.ac.ox.cs.JRDFox.JRDFStoreException;
13import uk.ac.ox.cs.JRDFox.model.Individual;
14import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
15import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner;
16import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
17import uk.ac.ox.cs.pagoda.util.Timer;
18import uk.ac.ox.cs.pagoda.util.Utility;
19
20public class TwoStageQueryEngine extends StageQueryEngine {
21
22 IndividualCollector m_collector = new IndividualCollector();
23
24 public TwoStageQueryEngine(String name, boolean checkValidity) {
25 super(name, checkValidity);
26 }
27
28 @Override
29 public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) {
30 TwoStageApplication program = new FoldedApplication2(this, dProgram, gap);
31 program.materialise();
32 }
33
34 @Override
35 public void importRDFData(String fileName, String importedFile) {
36 super.importRDFData(fileName, importedFile);
37 TurtleParser parser = new TurtleParser();
38 parser.setRDFHandler(m_collector);
39 for (String file: importedFile.split(QueryReasoner.ImportDataFileSeparator)) {
40 FileInputStream inputStream;
41 try {
42 inputStream = new FileInputStream(file);
43 parser.parse(inputStream, "");
44 inputStream.close();
45 } catch (FileNotFoundException e) {
46 // TODO Auto-generated catch block
47 e.printStackTrace();
48 } catch (RDFParseException e) {
49 // TODO Auto-generated catch block
50 e.printStackTrace();
51 } catch (RDFHandlerException e) {
52 // TODO Auto-generated catch block
53 e.printStackTrace();
54 } catch (IOException e) {
55 // TODO Auto-generated catch block
56 e.printStackTrace();
57 }
58 }
59 }
60
61 @Override
62 public int materialiseRestrictedly(DatalogProgram dProgram, GapByStore4ID gap) {
63 TwoStageApplication program = new RestrictedApplication2(this, dProgram, gap);
64 return program.materialise();
65 }
66
67 public void materialise(String programText, GapByStore4ID gap, boolean incrementally) {
68 try {
69 if (gap != null) {
70 try {
71 gap.compile(incrementally ? null : programText);
72 gap.addBackTo();
73 } finally {
74 gap.clear();
75 }
76 } else {
77 long oldTripleCount = store.getTriplesCount();
78 Timer t = new Timer();
79
80 if (!incrementally)
81// store.addRules(new String[] {programText});
82 store.importRules(programText);
83 store.applyReasoning(incrementally);
84
85 long tripleCount = store.getTriplesCount();
86
87 Utility.logDebug("current store after materialising upper related rules: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)");
88 Utility.logDebug("current store finished the materialisation of upper related rules in " + t.duration() + " seconds.");
89 }
90 store.clearRulesAndMakeFactsExplicit();
91 } catch (JRDFStoreException e) {
92 e.printStackTrace();
93 }
94
95 }
96
97 public Collection<Individual> getAllIndividuals() {
98 return m_collector.getAllIndividuals();
99 }
100
101}
102
103
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/Violation.java b/src/uk/ac/ox/cs/pagoda/multistage/Violation.java
index b872f00..2f98784 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/Violation.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/Violation.java
@@ -1,9 +1,9 @@
1package uk.ac.ox.cs.pagoda.multistage; 1package uk.ac.ox.cs.pagoda.multistage;
2 2
3import java.util.LinkedList;
4
5import org.semanticweb.HermiT.model.DLClause; 3import org.semanticweb.HermiT.model.DLClause;
6 4
5import java.util.LinkedList;
6
7public class Violation { 7public class Violation {
8 8
9 DLClause clause = null; 9 DLClause clause = null;
@@ -41,6 +41,13 @@ public class Violation {
41 public String[] getVariables() { 41 public String[] getVariables() {
42 return vars; 42 return vars;
43 } 43 }
44 44
45 45 @Override
46 public String toString() {
47 return "Violation{" +
48 "constraint=" + constraint +
49 ", clause=" + clause +
50 ", tuples=" + tuples +
51 '}';
52 }
46} 53}
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/ViolationTuple.java b/src/uk/ac/ox/cs/pagoda/multistage/ViolationTuple.java
new file mode 100644
index 0000000..129f5dd
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/multistage/ViolationTuple.java
@@ -0,0 +1,19 @@
1package uk.ac.ox.cs.pagoda.multistage;
2
3import org.semanticweb.HermiT.model.Individual;
4
5import java.util.ArrayList;
6
7/**
8 * Just a list of <tt>Individual</tt>s.
9 * */
10public class ViolationTuple extends ArrayList<Individual> {
11
12 public ViolationTuple() {
13 super();
14 }
15
16 public ViolationTuple(int size) {
17 super(size);
18 }
19}
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java
index f40ae31..00e7e2b 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConcept.java
@@ -1,25 +1,8 @@
1package uk.ac.ox.cs.pagoda.multistage.treatement; 1package uk.ac.ox.cs.pagoda.multistage.treatement;
2 2
3import java.util.Arrays; 3import org.semanticweb.HermiT.model.*;
4import java.util.Collections; 4import uk.ac.ox.cs.JRDFox.JRDFStoreException;
5import java.util.Comparator; 5import uk.ac.ox.cs.JRDFox.store.TupleIterator;
6import java.util.HashMap;
7import java.util.HashSet;
8import java.util.Iterator;
9import java.util.LinkedList;
10import java.util.Map;
11import java.util.Set;
12
13import org.semanticweb.HermiT.model.AnnotatedEquality;
14import org.semanticweb.HermiT.model.Atom;
15import org.semanticweb.HermiT.model.AtomicConcept;
16import org.semanticweb.HermiT.model.AtomicRole;
17import org.semanticweb.HermiT.model.DLClause;
18import org.semanticweb.HermiT.model.DLPredicate;
19import org.semanticweb.HermiT.model.Equality;
20import org.semanticweb.HermiT.model.Inequality;
21import org.semanticweb.HermiT.model.Variable;
22
23import uk.ac.ox.cs.pagoda.constraints.PredicateDependency; 6import uk.ac.ox.cs.pagoda.constraints.PredicateDependency;
24import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; 7import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
25import uk.ac.ox.cs.pagoda.multistage.AnswerTupleID; 8import uk.ac.ox.cs.pagoda.multistage.AnswerTupleID;
@@ -31,45 +14,40 @@ import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager;
31import uk.ac.ox.cs.pagoda.util.Namespace; 14import uk.ac.ox.cs.pagoda.util.Namespace;
32import uk.ac.ox.cs.pagoda.util.SparqlHelper; 15import uk.ac.ox.cs.pagoda.util.SparqlHelper;
33import uk.ac.ox.cs.pagoda.util.Utility; 16import uk.ac.ox.cs.pagoda.util.Utility;
34import uk.ac.ox.cs.JRDFox.JRDFStoreException; 17import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
35import uk.ac.ox.cs.JRDFox.store.TupleIterator; 18import uk.ac.ox.cs.pagoda.util.tuples.TupleBuilder;
19
20import java.util.*;
36 21
37public abstract class Pick4NegativeConcept implements Treatment { 22public abstract class Pick4NegativeConcept implements Treatment {
38 23
39 MultiStageQueryEngine engine; 24 public Set<Atom> addedGroundAtoms = new HashSet<Atom>();
40 MultiStageUpperProgram program; 25 MultiStageQueryEngine engine;
41 RDFoxTripleManager tripleManager; 26 MultiStageUpperProgram program;
27 RDFoxTripleManager tripleManager;
42 PredicateDependency dependencyGraph; 28 PredicateDependency dependencyGraph;
43 boolean addGap = false; 29 boolean addGap = false;
44 30
45 public Pick4NegativeConcept(MultiStageQueryEngine store, MultiStageUpperProgram multiProgram) { 31 public Pick4NegativeConcept(MultiStageQueryEngine store, MultiStageUpperProgram multiProgram) {
46 this.engine = store; 32 this.engine = store;
47 this.program = multiProgram; 33 this.program = multiProgram;
48 this.tripleManager = new RDFoxTripleManager(store.getDataStore(), true); 34 this.tripleManager = new RDFoxTripleManager(store.getDataStore(), true);
49 } 35 }
50 36
51 void addTripleByID(Atom atom, Atom gapAtom, Map<Variable, Integer> assignment) { 37 void addTripleByID(Atom atom, Atom gapAtom, Map<Variable, Integer> assignment) {
52 int[] newTuple = tripleManager.getInstance(atom, assignment); 38 int[] newTuple = tripleManager.getInstance(atom, assignment);
53 tripleManager.addTripleByID(newTuple); 39 tripleManager.addTripleByID(newTuple);
54 if (addGap) 40 if (addGap)
55 tripleManager.addTripleByID(tripleManager.getInstance(gapAtom, assignment)); 41 tripleManager.addTripleByID(tripleManager.getInstance(gapAtom, assignment));
56 } 42 }
57 43
58 public Set<Atom> addedGroundAtoms = new HashSet<Atom>();
59
60 protected boolean makeSatisfied(Violation violation, Comparator<Atom> comp) { 44 protected boolean makeSatisfied(Violation violation, Comparator<Atom> comp) {
61 LinkedList<AnswerTupleID> tuples = violation.getTuples(); 45 LinkedList<AnswerTupleID> tuples = violation.getTuples();
62 DLClause constraint = violation.getConstraint(); 46 DLClause constraint = violation.getConstraint();
63 Map<Variable, Integer> assignment = new HashMap<Variable, Integer>(); 47 Map<Variable, Integer> assignment = new HashMap<Variable, Integer>();
64 48
65 if (constraint.getHeadLength() > 1) { 49 if (constraint.getHeadLength() > 1) {
66 Atom[] orderedAtoms = new Atom[constraint.getHeadLength()]; 50 Atom[] orderedAtoms = Arrays.copyOf(constraint.getHeadAtoms(), constraint.getHeadLength());
67 int index = 0;
68
69 for (Atom headAtom: constraint.getHeadAtoms()) {
70 orderedAtoms[index++] = headAtom;
71 }
72
73 Arrays.sort(orderedAtoms, comp); 51 Arrays.sort(orderedAtoms, comp);
74 52
75 Set<AnswerTupleID> negTuples = new HashSet<AnswerTupleID>(); 53 Set<AnswerTupleID> negTuples = new HashSet<AnswerTupleID>();
@@ -103,10 +81,9 @@ public abstract class Pick4NegativeConcept implements Treatment {
103 AnswerTupleID lastAdded = null; 81 AnswerTupleID lastAdded = null;
104 82
105 for (Iterator<AnswerTupleID> iter = tuples.iterator(); iter.hasNext(); ) { 83 for (Iterator<AnswerTupleID> iter = tuples.iterator(); iter.hasNext(); ) {
106 84
107 AnswerTupleID tuple = iter.next(); 85 AnswerTupleID tuple = iter.next();
108 if (negTuples.contains(MultiStageUpperProgram.project(tuple, violation.getVariables(), subVars))) ; 86 if (!negTuples.contains(MultiStageUpperProgram.project(tuple, violation.getVariables(), subVars))) {
109 else {
110 if (lastAdded == null || tComp.compare(lastAdded, tuple) != 0) { 87 if (lastAdded == null || tComp.compare(lastAdded, tuple) != 0) {
111 lastAdded = tuple; 88 lastAdded = tuple;
112 tuple.getAssignment(violation.getVariables(), assignment); 89 tuple.getAssignment(violation.getVariables(), assignment);
@@ -120,28 +97,39 @@ public abstract class Pick4NegativeConcept implements Treatment {
120 if (tuples.isEmpty()) 97 if (tuples.isEmpty())
121 return true; 98 return true;
122 } 99 }
123 100 if (!tuples.isEmpty()) return false;
124 if (!tuples.isEmpty()) return false;
125
126 } 101 }
127 else { 102 else {
128 Set<Atom> headAtoms = new HashSet<Atom>(); 103 Set<Atom> headAtoms = new HashSet<Atom>();
129 for (DLClause clause: program.convertExist(constraint, violation.getClause())) { 104
130 if (DLClauseHelper.hasSubsetBodyAtoms(clause, constraint)) { 105 ArrayList<Tuple<Individual>> violationTuples = new ArrayList<>(violation.getTuples().size());
131 Atom tHeadAtom = clause.getHeadAtom(0); 106 for (int i = 0; i < violation.getTuples().size(); i++) {
132 Atom tGapHeadAtom = addGap ? getGapAtom(tHeadAtom) : null; 107 AnswerTupleID answerTupleID = violation.getTuples().get(i);
133 if (DLClauseHelper.isGround(tHeadAtom)) { 108 TupleBuilder<Individual> tupleBuilder = new TupleBuilder<>();
134 if (!addedGroundAtoms.contains(tHeadAtom)) { 109 for (int j = 0; j < answerTupleID.getArity(); j++) {
135 program.addUpdatedPredicate(tHeadAtom.getDLPredicate()); 110 String rawTerm = tripleManager.getRawTerm(answerTupleID.getTerm(j));
136 addTripleByID(tHeadAtom, tGapHeadAtom, null); 111 Individual individual = Individual.create(rawTerm.substring(1, rawTerm.length()-1));
137 addedGroundAtoms.add(tHeadAtom); 112 tupleBuilder.append(individual);
138 }
139 }
140 else headAtoms.add(tHeadAtom);
141 } 113 }
142 else { 114 violationTuples.add(tupleBuilder.build());
143 Utility.logError("There might be an error here... Can't happend!!!"); 115 }
116
117 for (DLClause clause : program.convertExist(constraint, violation.getClause(), violationTuples)) {
118
119 if (!DLClauseHelper.hasSubsetBodyAtoms(clause, constraint)) {
120 Utility.logError("There might be an error here... Cannot happen!!!");
121 throw new Error("This condition should not happen!!!");
144 } 122 }
123
124 Atom tHeadAtom = clause.getHeadAtom(0);
125 Atom tGapHeadAtom = addGap ? getGapAtom(tHeadAtom) : null;
126 if (DLClauseHelper.isGround(tHeadAtom)) {
127 if (!addedGroundAtoms.contains(tHeadAtom)) {
128 program.addUpdatedPredicate(tHeadAtom.getDLPredicate());
129 addTripleByID(tHeadAtom, tGapHeadAtom, null);
130 addedGroundAtoms.add(tHeadAtom);
131 }
132 } else headAtoms.add(tHeadAtom);
145 } 133 }
146 if (!tuples.isEmpty()) 134 if (!tuples.isEmpty())
147 for (Atom atom: headAtoms) 135 for (Atom atom: headAtoms)
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptQuerySpecific.java b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptQuerySpecific.java
index ae168cf..10aa22f 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptQuerySpecific.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/treatement/Pick4NegativeConceptQuerySpecific.java
@@ -1,18 +1,7 @@
1package uk.ac.ox.cs.pagoda.multistage.treatement; 1package uk.ac.ox.cs.pagoda.multistage.treatement;
2 2
3import java.util.Comparator; 3import org.semanticweb.HermiT.model.*;
4import java.util.Set; 4import uk.ac.ox.cs.JRDFox.JRDFStoreException;
5
6import org.semanticweb.HermiT.model.AnnotatedEquality;
7import org.semanticweb.HermiT.model.AtLeastConcept;
8import org.semanticweb.HermiT.model.Atom;
9import org.semanticweb.HermiT.model.AtomicConcept;
10import org.semanticweb.HermiT.model.AtomicNegationConcept;
11import org.semanticweb.HermiT.model.AtomicRole;
12import org.semanticweb.HermiT.model.DLPredicate;
13import org.semanticweb.HermiT.model.Equality;
14import org.semanticweb.HermiT.model.Inequality;
15import org.semanticweb.HermiT.model.InverseRole;
16import uk.ac.ox.cs.pagoda.constraints.PredicateDependency; 5import uk.ac.ox.cs.pagoda.constraints.PredicateDependency;
17import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; 6import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
18import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; 7import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
@@ -20,10 +9,11 @@ import uk.ac.ox.cs.pagoda.multistage.Normalisation;
20import uk.ac.ox.cs.pagoda.multistage.RestrictedApplication; 9import uk.ac.ox.cs.pagoda.multistage.RestrictedApplication;
21import uk.ac.ox.cs.pagoda.multistage.Violation; 10import uk.ac.ox.cs.pagoda.multistage.Violation;
22import uk.ac.ox.cs.pagoda.query.QueryRecord; 11import uk.ac.ox.cs.pagoda.query.QueryRecord;
23import uk.ac.ox.cs.pagoda.rules.OverApproxExist; 12import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
24import uk.ac.ox.cs.pagoda.util.Namespace; 13import uk.ac.ox.cs.pagoda.util.Namespace;
25 14
26import uk.ac.ox.cs.JRDFox.JRDFStoreException; 15import java.util.Comparator;
16import java.util.Set;
27 17
28public class Pick4NegativeConceptQuerySpecific extends Pick4NegativeConcept { 18public class Pick4NegativeConceptQuerySpecific extends Pick4NegativeConcept {
29 19
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/treatement/SkolemTreatment.java b/src/uk/ac/ox/cs/pagoda/multistage/treatement/SkolemTreatment.java
index c65fdf9..7a92b79 100644
--- a/src/uk/ac/ox/cs/pagoda/multistage/treatement/SkolemTreatment.java
+++ b/src/uk/ac/ox/cs/pagoda/multistage/treatement/SkolemTreatment.java
@@ -1,11 +1,11 @@
1package uk.ac.ox.cs.pagoda.multistage.treatement; 1package uk.ac.ox.cs.pagoda.multistage.treatement;
2 2
3import uk.ac.ox.cs.JRDFox.JRDFStoreException;
3import uk.ac.ox.cs.pagoda.multistage.FoldedApplication; 4import uk.ac.ox.cs.pagoda.multistage.FoldedApplication;
4import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; 5import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
5import uk.ac.ox.cs.pagoda.multistage.Violation; 6import uk.ac.ox.cs.pagoda.multistage.Violation;
6 7
7import uk.ac.ox.cs.JRDFox.JRDFStoreException; 8// TODO should I implement something like this?
8
9public class SkolemTreatment implements Treatment { 9public class SkolemTreatment implements Treatment {
10 10
11 public SkolemTreatment(MultiStageQueryEngine multiStageQueryEngine, FoldedApplication program) { 11 public SkolemTreatment(MultiStageQueryEngine multiStageQueryEngine, FoldedApplication program) {
diff --git a/src/uk/ac/ox/cs/pagoda/query/AnswerTuple.java b/src/uk/ac/ox/cs/pagoda/query/AnswerTuple.java
index 8d7e0b1..54e4837 100644
--- a/src/uk/ac/ox/cs/pagoda/query/AnswerTuple.java
+++ b/src/uk/ac/ox/cs/pagoda/query/AnswerTuple.java
@@ -1,13 +1,10 @@
1package uk.ac.ox.cs.pagoda.query; 1package uk.ac.ox.cs.pagoda.query;
2 2
3import java.util.HashMap; 3import com.google.gson.*;
4import java.util.Map;
5
6import org.semanticweb.HermiT.model.Constant; 4import org.semanticweb.HermiT.model.Constant;
7import org.semanticweb.HermiT.model.Individual; 5import org.semanticweb.HermiT.model.Individual;
8import org.semanticweb.HermiT.model.Term; 6import org.semanticweb.HermiT.model.Term;
9import org.semanticweb.HermiT.model.Variable; 7import org.semanticweb.HermiT.model.Variable;
10
11import uk.ac.ox.cs.JRDFox.JRDFStoreException; 8import uk.ac.ox.cs.JRDFox.JRDFStoreException;
12import uk.ac.ox.cs.JRDFox.model.Datatype; 9import uk.ac.ox.cs.JRDFox.model.Datatype;
13import uk.ac.ox.cs.JRDFox.model.GroundTerm; 10import uk.ac.ox.cs.JRDFox.model.GroundTerm;
@@ -15,13 +12,21 @@ import uk.ac.ox.cs.JRDFox.model.Literal;
15import uk.ac.ox.cs.JRDFox.store.TupleIterator; 12import uk.ac.ox.cs.JRDFox.store.TupleIterator;
16import uk.ac.ox.cs.pagoda.util.Namespace; 13import uk.ac.ox.cs.pagoda.util.Namespace;
17 14
15import java.lang.reflect.Type;
16import java.util.HashMap;
17import java.util.Map;
18import java.util.StringTokenizer;
19import java.util.regex.Matcher;
20import java.util.regex.Pattern;
21
18public class AnswerTuple { 22public class AnswerTuple {
19 23
20 public static final String SEPARATOR = "\t"; 24 public static final String SEPARATOR = "\t";
25 static final Pattern owlLiteralRegex =
26 Pattern.compile("^\"(?<lexicalForm>[^@]+(@(?<langTag>.+))?)\"(^^<(?<dataType>.+)>)?$");
27 String m_str = null;
28 GroundTerm[] m_tuple;
21 29
22 String m_str = null;
23 GroundTerm[] m_tuple;
24
25 public AnswerTuple(TupleIterator iter, int arity) { 30 public AnswerTuple(TupleIterator iter, int arity) {
26 m_tuple = new GroundTerm[arity]; 31 m_tuple = new GroundTerm[arity];
27 try { 32 try {
@@ -29,107 +34,157 @@ public class AnswerTuple {
29 m_tuple[i] = iter.getGroundTerm(i); 34 m_tuple[i] = iter.getGroundTerm(i);
30 } catch (JRDFStoreException e) { 35 } catch (JRDFStoreException e) {
31 e.printStackTrace(); 36 e.printStackTrace();
32 } 37 }
33 } 38 }
34 39
35 public AnswerTuple(GroundTerm[] terms) { 40 public AnswerTuple(GroundTerm[] terms) {
36 m_tuple = terms; 41 m_tuple = terms;
37 } 42 }
38 43
44// private AnswerTuple(String m_str) {
45// this.m_str = m_str;
46// }
47
39 private AnswerTuple(AnswerTuple sup, int arity) { 48 private AnswerTuple(AnswerTuple sup, int arity) {
40 m_tuple = new GroundTerm[arity]; 49 m_tuple = new GroundTerm[arity];
41 for (int i = 0; i < arity; ++i) m_tuple[i] = sup.m_tuple[i]; 50 for(int i = 0; i < arity; ++i) m_tuple[i] = sup.m_tuple[i];
51 }
52
53 /**
54 * It returns the first argument if its arity equals length, a new AnswerTuple otherwise.
55 */
56 public static AnswerTuple getInstance(AnswerTuple extendedTuple, int length) {
57 if(length == extendedTuple.getArity()) return extendedTuple;
58 else return new AnswerTuple(extendedTuple, length);
42 } 59 }
43 60
44 public int getArity() { 61 public int getArity() {
45 return m_tuple.length; 62 return m_tuple.length;
46 } 63 }
47 64
48 public int hashCode() { 65 public int hashCode() {
49// return toString().hashCode(); 66// return toString().hashCode();
50 int code = 0; 67 int code = 0;
51 for (int i = 0; i < m_tuple.length; ++i) 68 for (int i = 0; i < m_tuple.length; ++i)
52 code = code * 1997 + m_tuple[i].hashCode(); 69 code = code * 1997 + m_tuple[i].hashCode();
53 return code; 70 return code;
54 } 71 }
55 72
56 public boolean equals(Object obj) { 73 public boolean equals(Object obj) {
57 if (!(obj instanceof AnswerTuple)) return false; 74 if (!(obj instanceof AnswerTuple)) return false;
58 AnswerTuple that = (AnswerTuple) obj; 75 AnswerTuple that = (AnswerTuple) obj;
59 if (m_tuple.length != that.m_tuple.length) return false; 76 if (m_tuple.length != that.m_tuple.length) return false;
60 for (int i = 0; i < m_tuple.length; ++i) 77 for (int i = 0; i < m_tuple.length; ++i)
61 if (!m_tuple[i].equals(that.m_tuple[i])) 78 if (!m_tuple[i].equals(that.m_tuple[i]))
62 return false; 79 return false;
63 return true; 80 return true;
64// return toString().equals(obj.toString()); 81// return toString().equals(obj.toString());
65 } 82 }
66 83
67 public String toString() { 84 public String toString() {
68 if (m_str != null) return m_str; 85 if(m_str != null) return m_str;
69 StringBuilder sb = new StringBuilder(); 86 StringBuilder sb = new StringBuilder();
70 for (int i = 0; i < m_tuple.length; ++i) { 87 for (int i = 0; i < m_tuple.length; ++i) {
71 if (sb.length() != 0) sb.append(SEPARATOR); 88 if (sb.length() != 0) sb.append(SEPARATOR);
72 if (m_tuple[i] instanceof uk.ac.ox.cs.JRDFox.model.Individual) 89 if (m_tuple[i] instanceof uk.ac.ox.cs.JRDFox.model.Individual)
73 sb.append("<").append(((uk.ac.ox.cs.JRDFox.model.Individual) m_tuple[i]).getIRI()).append(">"); 90 sb.append("<").append(((uk.ac.ox.cs.JRDFox.model.Individual) m_tuple[i]).getIRI()).append(">");
74 else if (m_tuple[i] instanceof uk.ac.ox.cs.JRDFox.model.BlankNode) { 91 else if (m_tuple[i] instanceof uk.ac.ox.cs.JRDFox.model.BlankNode) {
75 sb.append(((uk.ac.ox.cs.JRDFox.model.BlankNode) m_tuple[i]).toString()); 92 sb.append(m_tuple[i].toString());
76 } 93 }
77 else { 94 else {
78 Literal l = (Literal) m_tuple[i]; 95 Literal l = (Literal) m_tuple[i];
79 sb.append('"').append(l.getLexicalForm()).append("\""); 96 sb.append('"').append(l.getLexicalForm()).append("\"");
80 if (!l.getDatatype().equals(Datatype.XSD_STRING) && !l.getDatatype().equals(Datatype.RDF_PLAIN_LITERAL)) 97 if (!l.getDatatype().equals(Datatype.XSD_STRING) && !l.getDatatype().equals(Datatype.RDF_PLAIN_LITERAL))
81 sb.append("^^<").append(l.getDatatype().getIRI()).append(">"); 98 sb.append("^^<").append(l.getDatatype().getIRI()).append(">");
82 } 99 }
83 } 100 }
84 return m_str = sb.toString(); 101 return m_str = sb.toString();
85 } 102 }
86 103
87 public GroundTerm getGroundTerm(int i) { 104 public GroundTerm getGroundTerm(int i) {
88 return m_tuple[i]; 105 return m_tuple[i];
89 } 106 }
90 107
91 public Map<Variable, Term> getAssignment(String[] vars) { 108 public Map<Variable, Term> getAssignment(String[] vars) {
92 Map<Variable, Term> map = new HashMap<Variable, Term>(); 109 Map<Variable, Term> map = new HashMap<Variable, Term>();
93 int index = 0; 110 int index = 0;
94 Term t; 111 Term t;
95 for (String var: vars) { 112 for (String var: vars) {
96 if (m_tuple[index] instanceof uk.ac.ox.cs.JRDFox.model.Individual) 113 if(m_tuple[index] instanceof uk.ac.ox.cs.JRDFox.model.Individual)
97 t = Individual.create((((uk.ac.ox.cs.JRDFox.model.Individual) m_tuple[index]).getIRI())); 114 t = Individual.create((((uk.ac.ox.cs.JRDFox.model.Individual) m_tuple[index]).getIRI()));
98 else { 115 else {
99 uk.ac.ox.cs.JRDFox.model.Literal l = (uk.ac.ox.cs.JRDFox.model.Literal) m_tuple[index]; 116 uk.ac.ox.cs.JRDFox.model.Literal l = (uk.ac.ox.cs.JRDFox.model.Literal) m_tuple[index];
100 t = Constant.create(l.getLexicalForm(), l.getDatatype().getIRI()); 117 t = Constant.create(l.getLexicalForm(), l.getDatatype().getIRI());
101 } 118 }
102 map.put(Variable.create(var), t); 119 map.put(Variable.create(var), t);
103 ++index; 120 ++index;
104 } 121 }
105 return map; 122 return map;
106 } 123 }
107 124
108 public boolean hasAuxPredicate() { 125 public boolean hasAuxPredicate() {
109 String iri; 126 String iri;
110 for (int i = 0; i < m_tuple.length; ++i) 127 for (int i = 0; i < m_tuple.length; ++i)
111 if ((m_tuple[i] instanceof uk.ac.ox.cs.JRDFox.model.Individual)) { 128 if ((m_tuple[i] instanceof uk.ac.ox.cs.JRDFox.model.Individual)) {
112 iri = ((uk.ac.ox.cs.JRDFox.model.Individual) m_tuple[i]).getIRI(); 129 iri = ((uk.ac.ox.cs.JRDFox.model.Individual) m_tuple[i]).getIRI();
113 if ( iri.startsWith(Namespace.PAGODA_AUX) || iri.contains("_AUX") || iri.contains("_neg") || iri.contains("internal:def")) 130 if(iri.startsWith(Namespace.PAGODA_AUX) || iri.contains("_AUX") || iri.contains("_neg") || iri.contains("internal:def"))
114 return true; 131 return true;
115 } 132 }
116 return false; 133 return false;
117 } 134 }
118 135
119 public boolean hasAnonyIndividual() { 136 public boolean hasAnonymousIndividual() {
120 String iri; 137 String iri;
121 for (int i = 0; i < m_tuple.length; ++i) 138 for(int i = 0; i < m_tuple.length; ++i)
122 if ((m_tuple[i] instanceof uk.ac.ox.cs.JRDFox.model.Individual)) { 139 if((m_tuple[i] instanceof uk.ac.ox.cs.JRDFox.model.Individual)) {
123 iri = ((uk.ac.ox.cs.JRDFox.model.Individual) m_tuple[i]).getIRI(); 140 iri = ((uk.ac.ox.cs.JRDFox.model.Individual) m_tuple[i]).getIRI();
124 if (iri.startsWith(Namespace.PAGODA_ANONY) || iri.startsWith(Namespace.KARMA_ANONY)) 141 if(iri.startsWith(Namespace.PAGODA_ANONY) || iri.startsWith(Namespace.KARMA_ANONY))
125 return true; 142 return true;
126 } 143 }
127 return false; 144 return false;
128 } 145 }
129 146
130 public static AnswerTuple create(AnswerTuple extendedTuple, int length) { 147 public static class AnswerTupleSerializer implements JsonSerializer<AnswerTuple> {
131 if (length == extendedTuple.getArity()) return extendedTuple; 148
132 else return new AnswerTuple(extendedTuple, length); 149 public JsonElement serialize(AnswerTuple src, Type typeOfSrc, JsonSerializationContext context) {
150 return new JsonPrimitive(src.toString());
151 }
152
133 } 153 }
134 154
155 public static class AnswerTupleDeserializer implements JsonDeserializer<AnswerTuple> {
156 public AnswerTuple deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
157 throws JsonParseException {
158 String tuplesString = json.getAsJsonPrimitive().getAsString();
159 StringTokenizer tokenizer = new StringTokenizer(SEPARATOR);
160 GroundTerm[] terms = new GroundTerm[tokenizer.countTokens()];
161
162 // TODO test parsing
163 for (int i = 0; i < tokenizer.countTokens(); i++) {
164 String token = tokenizer.nextToken();
165 if (token.charAt(0) == '<') {
166 terms[i] = uk.ac.ox.cs.JRDFox.model.Individual.create(token.substring(1,token.length()-1));
167 }
168 else if (token.charAt(0) == '"') {
169 Matcher matcher = owlLiteralRegex.matcher(token);
170 if(matcher.matches()) {
171 String lexicalForm = matcher.group("lexicalForm");
172 String dataTypeIRI = matcher.group("dataType");
173 Datatype dataType;
174 if (dataTypeIRI.isEmpty()) dataType = Datatype.RDF_PLAIN_LITERAL;
175 else dataType = uk.ac.ox.cs.JRDFox.model.Datatype.value(dataTypeIRI);
176 terms[i] = uk.ac.ox.cs.JRDFox.model.Literal.create(lexicalForm, dataType);
177 }
178 else {
179 throw new IllegalArgumentException("The given json does not represent a valid AnswerTuple");
180 }
181 }
182 else {
183 terms[i] = uk.ac.ox.cs.JRDFox.model.BlankNode.create(token);
184 }
185 }
186 return new AnswerTuple(terms);
187 }
188 }
189
135} 190}
diff --git a/src/uk/ac/ox/cs/pagoda/query/DeserializedQueryRecord.java b/src/uk/ac/ox/cs/pagoda/query/DeserializedQueryRecord.java
new file mode 100644
index 0000000..3d25eaf
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/query/DeserializedQueryRecord.java
@@ -0,0 +1,9 @@
1package uk.ac.ox.cs.pagoda.query;
2
3/*
4* A light version of QueryRecord,
5* which can be obtained easily from the Json serialization of QueryRecord.
6* */
7public class DeserializedQueryRecord {
8 // TODO implement
9}
diff --git a/src/uk/ac/ox/cs/pagoda/query/GapByTriple.java b/src/uk/ac/ox/cs/pagoda/query/GapByTriple.java
index d2e9b90..eaa629b 100644
--- a/src/uk/ac/ox/cs/pagoda/query/GapByTriple.java
+++ b/src/uk/ac/ox/cs/pagoda/query/GapByTriple.java
@@ -1,36 +1,29 @@
1package uk.ac.ox.cs.pagoda.query; 1package uk.ac.ox.cs.pagoda.query;
2 2
3import java.io.BufferedWriter; 3import org.semanticweb.HermiT.model.*;
4import java.io.FileOutputStream; 4import uk.ac.ox.cs.JRDFox.JRDFStoreException;
5import java.io.IOException; 5import uk.ac.ox.cs.JRDFox.Prefixes;
6import java.io.OutputStreamWriter; 6import uk.ac.ox.cs.JRDFox.store.DataStore;
7import java.util.Collection; 7import uk.ac.ox.cs.JRDFox.store.Parameters;
8 8import uk.ac.ox.cs.JRDFox.store.TupleIterator;
9import org.semanticweb.HermiT.model.Atom;
10import org.semanticweb.HermiT.model.AtomicConcept;
11import org.semanticweb.HermiT.model.AtomicRole;
12import org.semanticweb.HermiT.model.DLClause;
13import org.semanticweb.HermiT.model.Individual;
14
15import uk.ac.ox.cs.pagoda.MyPrefixes; 9import uk.ac.ox.cs.pagoda.MyPrefixes;
16import uk.ac.ox.cs.pagoda.owl.OWLHelper; 10import uk.ac.ox.cs.pagoda.owl.OWLHelper;
17import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; 11import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
18import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager; 12import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager;
19import uk.ac.ox.cs.pagoda.util.Namespace; 13import uk.ac.ox.cs.pagoda.util.Namespace;
20import uk.ac.ox.cs.pagoda.util.Utility; 14import uk.ac.ox.cs.pagoda.util.Utility;
21import uk.ac.ox.cs.JRDFox.JRDFStoreException; 15
22import uk.ac.ox.cs.JRDFox.Prefixes; 16import java.io.BufferedWriter;
23import uk.ac.ox.cs.JRDFox.store.DataStore; 17import java.io.FileOutputStream;
24import uk.ac.ox.cs.JRDFox.store.Parameters; 18import java.io.IOException;
25import uk.ac.ox.cs.JRDFox.store.TupleIterator; 19import java.io.OutputStreamWriter;
20import java.util.Collection;
26 21
27public class GapByTriple extends GapTupleIterator<String> { 22public class GapByTriple extends GapTupleIterator<String> {
28 23
29 private static final String RDF_TYPE = Namespace.RDF_NS + "type";
30 private static final String BRIEF_RDF_TYPE = "rdf:type";
31
32 static final String allTripleQuery = "SELECT ?X ?Y ?Z WHERE { ?X ?Y ?Z }"; 24 static final String allTripleQuery = "SELECT ?X ?Y ?Z WHERE { ?X ?Y ?Z }";
33 25 private static final String RDF_TYPE = Namespace.RDF_NS + "type";
26 private static final String BRIEF_RDF_TYPE = "rdf:type";
34 DataStore lowerStore, upperStore; 27 DataStore lowerStore, upperStore;
35 long multi; 28 long multi;
36 TupleIterator iterator; 29 TupleIterator iterator;
diff --git a/src/uk/ac/ox/cs/pagoda/query/GapTupleIterator.java b/src/uk/ac/ox/cs/pagoda/query/GapTupleIterator.java
index 61b6364..bf11168 100644
--- a/src/uk/ac/ox/cs/pagoda/query/GapTupleIterator.java
+++ b/src/uk/ac/ox/cs/pagoda/query/GapTupleIterator.java
@@ -1,10 +1,10 @@
1package uk.ac.ox.cs.pagoda.query; 1package uk.ac.ox.cs.pagoda.query;
2 2
3import java.util.Iterator;
4
5import uk.ac.ox.cs.JRDFox.JRDFStoreException; 3import uk.ac.ox.cs.JRDFox.JRDFStoreException;
6import uk.ac.ox.cs.JRDFox.store.DataStore; 4import uk.ac.ox.cs.JRDFox.store.DataStore;
7 5
6import java.util.Iterator;
7
8public abstract class GapTupleIterator<T> implements Iterator<T> { 8public abstract class GapTupleIterator<T> implements Iterator<T> {
9 9
10 public static final String gapPredicateSuffix = "_AUXg"; 10 public static final String gapPredicateSuffix = "_AUXg";
diff --git a/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java b/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java
index ce92a67..fd20af1 100644
--- a/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java
+++ b/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java
@@ -1,311 +1,345 @@
1package uk.ac.ox.cs.pagoda.query; 1package uk.ac.ox.cs.pagoda.query;
2 2
3import java.io.BufferedWriter; 3import com.google.gson.*;
4import java.io.FileNotFoundException; 4import com.google.gson.reflect.TypeToken;
5import java.io.FileOutputStream; 5import org.semanticweb.HermiT.model.*;
6import java.io.IOException; 6import org.semanticweb.owlapi.model.*;
7import java.io.OutputStreamWriter;
8import java.util.Collection;
9import java.util.HashMap;
10import java.util.HashSet;
11import java.util.Iterator;
12import java.util.LinkedList;
13import java.util.Map;
14import java.util.Set;
15
16import org.semanticweb.HermiT.model.Atom;
17import org.semanticweb.HermiT.model.AtomicConcept;
18import org.semanticweb.HermiT.model.AtomicRole;
19import org.semanticweb.HermiT.model.DLClause;
20import org.semanticweb.HermiT.model.DLPredicate;
21import org.semanticweb.HermiT.model.Variable;
22import org.semanticweb.owlapi.model.OWLAxiom;
23import org.semanticweb.owlapi.model.OWLClass;
24import org.semanticweb.owlapi.model.OWLClassAssertionAxiom;
25import org.semanticweb.owlapi.model.OWLDataProperty;
26import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom;
27import org.semanticweb.owlapi.model.OWLIndividual;
28import org.semanticweb.owlapi.model.OWLLiteral;
29import org.semanticweb.owlapi.model.OWLObjectProperty;
30import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom;
31import org.semanticweb.owlapi.model.OWLOntology;
32import org.semanticweb.owlapi.model.OWLOntologyManager;
33import org.semanticweb.owlapi.model.OWLOntologyStorageException;
34
35import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; 7import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
36import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxAnswerTuples; 8import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxAnswerTuples;
37import uk.ac.ox.cs.pagoda.rules.GeneralProgram; 9import uk.ac.ox.cs.pagoda.rules.GeneralProgram;
38import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper; 10import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper;
39import uk.ac.ox.cs.pagoda.util.Namespace; 11import uk.ac.ox.cs.pagoda.util.Namespace;
12import uk.ac.ox.cs.pagoda.util.PagodaProperties;
40import uk.ac.ox.cs.pagoda.util.Utility; 13import uk.ac.ox.cs.pagoda.util.Utility;
14import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
15import uk.ac.ox.cs.pagoda.util.tuples.TupleBuilder;
16
17import java.io.*;
18import java.lang.reflect.Type;
19import java.util.*;
41 20
42public class QueryRecord { 21public class QueryRecord {
43 22
44 public static final String botQueryText = "SELECT ?X WHERE { ?X <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2002/07/owl#Nothing> }"; 23 public static final String botQueryText = "SELECT ?X WHERE { ?X <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2002/07/owl#Nothing> }";
45 24 public static final String SEPARATOR = "----------------------------------------";
46 private Step diffculty; 25 private static final String RDF_TYPE = "a"; //"rdf:type"; //RDF.type.toString();
47 26 boolean processed = false;
48 private String queryText; 27 String stringQueryID = null;
49 private int queryID = -1; 28 OWLOntology relevantOntology = null;
50 private String[][] answerVariables = null; 29 Set<DLClause> relevantClauses = new HashSet<DLClause>();
51 private Set<AnswerTuple> soundAnswerTuples = new HashSet<AnswerTuple>(); 30 double[] timer;
31 int subID;
32 DLClause queryClause = null;
33 private Step difficulty;
34 private String queryText;
35 private int queryID = -1;
36 private String[][] answerVariables = null;
37 private Set<AnswerTuple> soundAnswerTuples = new HashSet<AnswerTuple>();
52 private Set<AnswerTuple> gapAnswerTuples = null; 38 private Set<AnswerTuple> gapAnswerTuples = null;
53 39 private QueryManager m_manager;
54 private QueryManager m_manager; 40
55 41 private QueryRecord() {
42 }
43
44// private boolean containsAuxPredicate(String str) {
45// return str.contains(Namespace.PAGODA_AUX) || str.contains("_AUX") || str.contains("owl#Nothing") ||
46// str.contains("internal:def");
47// }
48
56 public QueryRecord(QueryManager manager, String text, int id, int subID) { 49 public QueryRecord(QueryManager manager, String text, int id, int subID) {
57 m_manager =manager; 50 m_manager = manager;
58 resetInfo(text, id, subID); 51 resetInfo(text, id, subID);
59 resetTimer(); 52 resetTimer();
60 } 53 }
61 54
55 public static Collection<String> collectQueryTexts(Collection<QueryRecord> queryRecords) {
56 Collection<String> texts = new LinkedList<String>();
57 for(QueryRecord record : queryRecords)
58 texts.add(record.queryText);
59 return texts;
60 }
61
62 public void resetInfo(String text, int id, int subid) { 62 public void resetInfo(String text, int id, int subid) {
63 queryID = id; 63 queryID = id;
64 subID = subid; 64 subID = subid;
65 stringQueryID = id + (subID == 0 ? "" : "_" + subID); 65 stringQueryID = id + (subID == 0 ? "" : "_" + subID);
66 m_manager.remove(queryText); 66 m_manager.remove(queryText);
67 m_manager.put(text, this); 67 m_manager.put(text, this);
68 queryClause = null; 68 queryClause = null;
69 answerVariables = ConjunctiveQueryHelper.getAnswerVariables(text); 69 answerVariables = ConjunctiveQueryHelper.getAnswerVariables(text);
70 queryText = text; // .replace("_:", "?"); 70 queryText = text; // .replace("_:", "?");
71 } 71 }
72 72
73 public void resetTimer() { 73 public void resetTimer() {
74 int length = Step.values().length; 74 int length = Step.values().length;
75 timer = new double[length]; 75 timer = new double[length];
76 for (int i = 0; i < length; ++i) timer[i] = 0; 76 for(int i = 0; i < length; ++i) timer[i] = 0;
77 } 77 }
78 78
79 public AnswerTuples getAnswers() { 79 public AnswerTuples getAnswers() {
80 if (processed()) 80 if(isProcessed())
81 return getLowerBoundAnswers(); 81 return getLowerBoundAnswers();
82 82
83 return getUpperBoundAnswers(); 83 return getUpperBoundAnswers();
84 } 84 }
85 85
86 public AnswerTuples getLowerBoundAnswers() { 86 public AnswerTuples getLowerBoundAnswers() {
87 return new AnswerTuplesImp(answerVariables[0], soundAnswerTuples); 87 return new AnswerTuplesImp(answerVariables[0], soundAnswerTuples);
88 } 88 }
89 89
90 public AnswerTuples getUpperBoundAnswers() { 90 public AnswerTuples getUpperBoundAnswers() {
91 return new AnswerTuplesImp(answerVariables[0], soundAnswerTuples, gapAnswerTuples); 91 return new AnswerTuplesImp(answerVariables[0], soundAnswerTuples, gapAnswerTuples);
92 } 92 }
93 93
94 public boolean updateLowerBoundAnswers(AnswerTuples answerTuples) { 94 public boolean updateLowerBoundAnswers(AnswerTuples answerTuples) {
95 if (answerTuples == null) return false; 95 if(answerTuples == null) return false;
96 boolean update = false; 96 boolean update = false;
97 for (AnswerTuple tuple; answerTuples.isValid(); answerTuples.moveNext()) { 97 for(AnswerTuple tuple; answerTuples.isValid(); answerTuples.moveNext()) {
98 tuple = answerTuples.getTuple(); 98 tuple = answerTuples.getTuple();
99 if (!soundAnswerTuples.contains(tuple) && (gapAnswerTuples == null || gapAnswerTuples.contains(tuple))) { 99 if(!soundAnswerTuples.contains(tuple) && (gapAnswerTuples == null || gapAnswerTuples.contains(tuple))) {
100 soundAnswerTuples.add(tuple); 100 soundAnswerTuples.add(tuple);
101 if (gapAnswerTuples != null) 101 if(gapAnswerTuples != null)
102 gapAnswerTuples.remove(tuple); 102 gapAnswerTuples.remove(tuple);
103 update = true; 103 update = true;
104 } 104 }
105 } 105 }
106 Utility.logInfo("The number of answers in the lower bound: " + soundAnswerTuples.size());
107 106
108 return update; 107 if(soundAnswerTuples.isEmpty())
108 Utility.logInfo("Lower bound answers empty");
109 else if(update)
110 Utility.logInfo("Lower bound answers updated: " + soundAnswerTuples.size());
111 else
112 Utility.logInfo("Lower bound answers unchanged");
113
114 return update;
109 } 115 }
110 116
111 public boolean updateUpperBoundAnswers(AnswerTuples answerTuples) { 117 public boolean updateUpperBoundAnswers(AnswerTuples answerTuples) {
112 return updateUpperBoundAnswers(answerTuples, false); 118 return updateUpperBoundAnswers(answerTuples, false);
113 } 119 }
114 120
115 public boolean updateUpperBoundAnswers(AnswerTuples answerTuples, boolean toCheckAux) { 121 public boolean updateUpperBoundAnswers(AnswerTuples answerTuples, boolean toCheckAux) {
116 RDFoxAnswerTuples rdfAnswerTuples; 122 if(!(answerTuples instanceof RDFoxAnswerTuples)) {
117 if (answerTuples instanceof RDFoxAnswerTuples) 123 String msg = "The upper bound must be computed by RDFox!";
118 rdfAnswerTuples = (RDFoxAnswerTuples) answerTuples; 124 Utility.logError(msg);
119 else { 125 throw new IllegalArgumentException(msg);
120 Utility.logError("The upper bound must be computed by RDFox!");
121 return false;
122 } 126 }
123 127
124 if (soundAnswerTuples.size() > 0) { 128 RDFoxAnswerTuples rdfoxAnswerTuples = (RDFoxAnswerTuples) answerTuples;
125 int number = 0; 129
126 for (; answerTuples.isValid(); answerTuples.moveNext()) { 130 Set<AnswerTuple> candidateGapAnswerTuples = new HashSet<AnswerTuple>();
127 ++number; 131 AnswerTuple tuple;
128 } 132 for(; rdfoxAnswerTuples.isValid(); rdfoxAnswerTuples.moveNext()) {
129 Utility.logInfo("The number of answers returned by the upper bound: " + number); 133 tuple = rdfoxAnswerTuples.getTuple();
130 if (number <= soundAnswerTuples.size()) { 134 if(isBottom() || !tuple.hasAnonymousIndividual())
131 if (gapAnswerTuples != null) gapAnswerTuples.clear(); 135 if((!toCheckAux || !tuple.hasAuxPredicate()) && !soundAnswerTuples.contains(tuple))
132 else gapAnswerTuples = new HashSet<AnswerTuple>(); 136 candidateGapAnswerTuples.add(tuple);
133
134 Utility.logInfo("The number of answers in the upper bound: " + (soundAnswerTuples.size() + gapAnswerTuples.size()));
135 return false;
136 }
137 answerTuples.reset();
138 } 137 }
139 138
140 boolean justCheck = (answerTuples.getArity() != answerVariables[1].length); 139 /*** START: debugging ***/
141 140 if(PagodaProperties.isDebuggingMode()) {
142 Set<AnswerTuple> tupleSet = new HashSet<AnswerTuple>(); 141 if(rdfoxAnswerTuples.getArity() != getAnswerVariables().length)
143 AnswerTuple tuple, extendedTuple; 142 throw new IllegalArgumentException(
144 for (; answerTuples.isValid(); answerTuples.moveNext()) { 143 "The arity of answers (" + rdfoxAnswerTuples.getArity() + ") " +
145 extendedTuple = rdfAnswerTuples.getTuple(); 144 "is different from the number of answer variables (" +
146 if (isBottom() || !extendedTuple.hasAnonyIndividual()) { 145 getAnswerVariables().length + ")");
147 tuple = AnswerTuple.create(extendedTuple, answerVariables[0].length); 146
148 if ((!toCheckAux || !tuple.hasAuxPredicate()) && !soundAnswerTuples.contains(tuple)) { 147 Set<AnswerTuple> namedAnswerTuples = new HashSet<>();
149 if (!toCheckAux && justCheck) return false; 148 rdfoxAnswerTuples.reset();
150 tupleSet.add(extendedTuple); 149 for(; rdfoxAnswerTuples.isValid(); rdfoxAnswerTuples.moveNext()) {
151 } 150 tuple = rdfoxAnswerTuples.getTuple();
151// if(isBottom() || !tuple.hasAnonymousIndividual()) {
152 namedAnswerTuples.add(tuple);
153// }
152 } 154 }
155 HashSet<AnswerTuple> difference = new HashSet<>(soundAnswerTuples);
156 difference.removeAll(namedAnswerTuples);
157 if(!difference.isEmpty())
158 throw new IllegalArgumentException("The upper bound does not contain the lower bound! Missing answers: " + difference
159 .size());
153 } 160 }
154 161 /*** END: debugging ***/
155 if (gapAnswerTuples == null) { 162
156 gapAnswerTuples = tupleSet; 163 boolean update;
157 164 if(gapAnswerTuples == null) {
158 Utility.logInfo("The number of answers in the upper bound: " + (soundAnswerTuples.size() + gapAnswerTuples.size())); 165 gapAnswerTuples = candidateGapAnswerTuples;
159 return true; 166 update = true;
160 } 167 } else {
161 168 update = gapAnswerTuples.retainAll(candidateGapAnswerTuples);
162 boolean update = false;
163 for (Iterator<AnswerTuple> iter = gapAnswerTuples.iterator(); iter.hasNext(); ) {
164 tuple = iter.next();
165 if (!tupleSet.contains(tuple)) {
166 iter.remove();
167 update = true;
168 }
169 } 169 }
170 170
171 Utility.logInfo("The number of answers in the upper bound: " + (soundAnswerTuples.size() + gapAnswerTuples.size())); 171 if(update)
172 172 Utility.logInfo("Upper bound answers updated: " + getNumberOfAnswers());
173 return update; 173 else
174 Utility.logInfo("Upper bound answers unchanged");
175
176 return update;
177
178// boolean update = false;
179// for(Iterator<AnswerTuple> iter = gapAnswerTuples.iterator(); iter.hasNext(); ) {
180// tuple = iter.next();
181// if(!candidateGapAnswerTuples.contains(tuple)) {
182// iter.remove();
183// update = true;
184// }
185// }
174 } 186 }
175
176// private boolean containsAuxPredicate(String str) {
177// return str.contains(Namespace.PAGODA_AUX) || str.contains("_AUX") || str.contains("owl#Nothing") ||
178// str.contains("internal:def");
179// }
180 187
181 boolean processed = false; 188 public int getNumberOfAnswers() {
189 return soundAnswerTuples.size() + gapAnswerTuples.size();
190 }
182 191
183 public void markAsProcessed() { 192 public void markAsProcessed() {
184 processed = true; 193 processed = true;
185 } 194 }
186 195
187 public boolean processed() { 196 public boolean isProcessed() {
188 if (gapAnswerTuples != null && gapAnswerTuples.isEmpty()) processed = true; 197 if(gapAnswerTuples != null && gapAnswerTuples.isEmpty()) processed = true;
189 return processed; 198 return processed;
190 } 199 }
191 200
192 public String[] getDistinguishedVariables() { 201 public String[] getDistinguishedVariables() {
193 return answerVariables[1]; 202 return answerVariables[1];
194 } 203 }
195 204
196 public String[] getAnswerVariables() { 205 public String[] getAnswerVariables() {
197 return answerVariables[0]; 206 return answerVariables[0];
198 } 207 }
199 208
200 public String[][] getVariables() { 209 public String[][] getVariables() {
201 return answerVariables; 210 return answerVariables;
202 } 211 }
203 212
204 public String getQueryText() { 213 public String getQueryText() {
205 return queryText; 214 return queryText;
206 } 215 }
207 216
208 String stringQueryID = null;
209
210 public String getQueryID() { 217 public String getQueryID() {
211 return stringQueryID; 218 return stringQueryID;
212 } 219 }
213 220
214 public AnswerTuples getGapAnswers() { 221 public AnswerTuples getGapAnswers() {
215 return new AnswerTuplesImp(answerVariables[0], gapAnswerTuples); 222 return new AnswerTuplesImp(answerVariables[0], gapAnswerTuples);
216 } 223 }
217 224
218 public String toString() { 225 public String toString() {
219 return queryText; 226 return queryText;
220 } 227 }
221
222 public static final String SEPARATOR = "----------------------------------------";
223 228
224 public void outputAnswers(BufferedWriter writer) throws IOException { 229 public void outputAnswers(BufferedWriter writer) throws IOException {
225 230
226 int answerCounter = soundAnswerTuples.size(); 231 int answerCounter = soundAnswerTuples.size();
227 if (!processed()) answerCounter += gapAnswerTuples.size(); 232 if(!isProcessed()) answerCounter += gapAnswerTuples.size();
228 233
229 Utility.logInfo("The number of answer tuples: " + answerCounter); 234 Utility.logInfo("The number of answer tuples: " + answerCounter);
230 235
231 if (writer != null) { 236 if (writer != null) {
232 writer.write("-------------- Query " + queryID + " ---------------------"); 237 writer.write("-------------- Query " + queryID + " ---------------------");
233 writer.newLine(); 238 writer.newLine();
234 writer.write(queryText); 239 writer.write(queryText);
235 writer.newLine(); 240 writer.newLine();
236 StringBuilder space = new StringBuilder(); 241 StringBuilder space = new StringBuilder();
237 int arity = getArity(), varSpace = 0; 242 int arity = getArity(), varSpace = 0;
238 for (int i = 0; i < arity; ++i) 243 for (int i = 0; i < arity; ++i)
239 varSpace += answerVariables[0][i].length(); 244 varSpace += answerVariables[0][i].length();
240 for (int i = 0; i < (SEPARATOR.length() - varSpace) / (arity + 1); ++i) 245 for (int i = 0; i < (SEPARATOR.length() - varSpace) / (arity + 1); ++i)
241 space.append(" "); 246 space.append(" ");
242 for (int i = 0; i < getArity(); ++i) { 247 for (int i = 0; i < getArity(); ++i) {
243 writer.write(space.toString()); 248 writer.write(space.toString());
244 writer.write(answerVariables[0][i]); 249 writer.write(answerVariables[0][i]);
245 } 250 }
246 writer.newLine(); 251 writer.newLine();
247 writer.write(SEPARATOR); 252 writer.write(SEPARATOR);
248 writer.newLine(); 253 writer.newLine();
249 for (AnswerTuple tuple: soundAnswerTuples) { 254 for (AnswerTuple tuple: soundAnswerTuples) {
250 writer.write(tuple.toString()); 255 writer.write(tuple.toString());
251 writer.newLine(); 256 writer.newLine();
252 } 257 }
253 if (!processed()) 258 if(!isProcessed())
254 for (AnswerTuple tuple: gapAnswerTuples) { 259 for (AnswerTuple tuple: gapAnswerTuples) {
255 writer.write("*"); 260 writer.write("*");
256 writer.write(tuple.toString()); 261 writer.write(tuple.toString());
257 writer.newLine(); 262 writer.newLine();
258 } 263 }
259// writer.write(SEPARATOR); 264// writer.write(SEPARATOR);
260 writer.newLine(); 265 writer.newLine();
261 } 266 }
262 267
263 } 268 }
264 269
270 public void outputAnswerStatistics() {
271
272 int answerCounter = soundAnswerTuples.size();
273 if(!isProcessed()) answerCounter += gapAnswerTuples.size();
274
275 Utility.logInfo("The number of answer tuples: " + answerCounter);
276// if (jsonAnswers != null) {
277// JSONObject jsonAnswer = new JSONObject();
278//
279// jsonAnswer.put("queryID", queryID);
280// jsonAnswer.put("queryText", queryText);
281//
282// JSONArray answerVars = new JSONArray();
283// int arity = getArity(), varSpace = 0;
284// for (int i = 0; i < getArity(); i++)
285// answerVars.add(answerVariables[0][i]);
286// jsonAnswer.put("answerVars", answerVars);
287//
288// JSONArray answerTuples = new JSONArray();
289// soundAnswerTuples.stream().forEach(t -> answerTuples.add(t));
290// jsonAnswer.put("answerTuples", answerTuples);
291//
292// if (!processed) {
293// JSONArray gapAnswerTuples = new JSONArray();
294// gapAnswerTuples.stream().forEach(t -> gapAnswerTuples.add(t));
295// }
296// jsonAnswer.put("gapAnswerTuples", gapAnswerTuples);
297//
298// jsonAnswers.put(Integer.toString(queryID), jsonAnswer);
299// }
300 }
301
265 public void outputTimes() { 302 public void outputTimes() {
266 for (Step step: Step.values()) { 303 for (Step step: Step.values()) {
267 Utility.logDebug("time for " + step + ": " + timer[step.ordinal()]); 304 Utility.logDebug("time for " + step + ": " + timer[step.ordinal()]);
268 } 305 }
269 } 306 }
270 307
271 public String outputSoundAnswerTuple() { 308 public String outputSoundAnswerTuple() {
272 StringBuilder builder = new StringBuilder(); 309 StringBuilder builder = new StringBuilder();
273 for (AnswerTuple tuple: soundAnswerTuples) 310 for (AnswerTuple tuple: soundAnswerTuples)
274 builder.append(tuple.toString()).append(Utility.LINE_SEPARATOR); 311 builder.append(tuple.toString()).append(Utility.LINE_SEPARATOR);
275 return builder.toString(); 312 return builder.toString();
276 } 313 }
277 314
278 public String outputGapAnswerTuple() { 315 public String outputGapAnswerTuple() {
279 StringBuilder builder = new StringBuilder(); 316 StringBuilder builder = new StringBuilder();
280 for (AnswerTuple tuple: gapAnswerTuples) 317 for(AnswerTuple tuple : gapAnswerTuples)
281 builder.append(tuple.toString()).append(Utility.LINE_SEPARATOR); 318 builder.append(tuple.toString()).append(Utility.LINE_SEPARATOR);
282 return builder.toString(); 319 return builder.toString();
283 }
284
285 public void setDifficulty(Step step) {
286 this.diffculty = step;
287 } 320 }
288 321
289 public Step getDifficulty() { 322 public Step getDifficulty() {
290 return diffculty; 323 return difficulty;
291 } 324 }
292 325
293 OWLOntology relevantOntology = null; 326 public void setDifficulty(Step step) {
294 Set<DLClause> relevantClauses = new HashSet<DLClause>(); 327 this.difficulty = step;
295
296 public void setRelevantOntology(OWLOntology knowledgebase) {
297 relevantOntology = knowledgebase;
298 } 328 }
299 329
300 public OWLOntology getRelevantOntology() { 330 public OWLOntology getRelevantOntology() {
301 return relevantOntology; 331 return relevantOntology;
332 }
333
334 public void setRelevantOntology(OWLOntology knowledgebase) {
335 relevantOntology = knowledgebase;
302 } 336 }
303 337
304 public void saveRelevantOntology(String filename) { 338 public void saveRelevantOntology(String filename) {
305 if (relevantOntology == null) return ; 339 if(relevantOntology == null) return;
306 OWLOntologyManager manager = relevantOntology.getOWLOntologyManager(); 340 OWLOntologyManager manager = relevantOntology.getOWLOntologyManager();
307 try { 341 try {
308 FileOutputStream outputStream = new FileOutputStream(filename); 342 FileOutputStream outputStream = new FileOutputStream(filename);
309 manager.saveOntology(relevantOntology, outputStream); 343 manager.saveOntology(relevantOntology, outputStream);
310 outputStream.close(); 344 outputStream.close();
311 } catch (OWLOntologyStorageException e) { 345 } catch (OWLOntologyStorageException e) {
@@ -318,12 +352,11 @@ public class QueryRecord {
318 } 352 }
319 353
320 public void saveRelevantClause() { 354 public void saveRelevantClause() {
321 if (relevantClauses == null) return ; 355 if(relevantClauses == null) return;
322 GeneralProgram p = new GeneralProgram(relevantClauses, relevantOntology); 356 GeneralProgram p = new GeneralProgram(relevantClauses, relevantOntology);
323 p.save(); 357 p.save();
324 } 358 }
325 359
326
327 public void removeUpperBoundAnswers(Collection<AnswerTuple> answers) { 360 public void removeUpperBoundAnswers(Collection<AnswerTuple> answers) {
328 for (AnswerTuple answer: answers) { 361 for (AnswerTuple answer: answers) {
329// if (soundAnswerTuples.contains(answer)) 362// if (soundAnswerTuples.contains(answer))
@@ -334,50 +367,37 @@ public class QueryRecord {
334 } 367 }
335 } 368 }
336 369
337
338 public void addLowerBoundAnswers(Collection<AnswerTuple> answers) { 370 public void addLowerBoundAnswers(Collection<AnswerTuple> answers) {
339 for (AnswerTuple answer: answers) { 371 for (AnswerTuple answer: answers) {
340 if (!gapAnswerTuples.contains(answer)) 372 if (!gapAnswerTuples.contains(answer))
341 Utility.logError("The answer (" + answer + ") cannot be added, because it is not in the upper bound."); 373 Utility.logError("The answer (" + answer + ") cannot be added, because it is not in the upper bound.");
342 gapAnswerTuples.remove(answer); 374 gapAnswerTuples.remove(answer);
343 375
344 answer = AnswerTuple.create(answer, answerVariables[0].length); 376 answer = AnswerTuple.getInstance(answer, answerVariables[0].length);
345// if (soundAnswerTuples.contains(answer)) 377// if (soundAnswerTuples.contains(answer))
346// Utility.logError("The answer (" + answer + ") cannot be added, because it is in the lower bound."); 378// Utility.logError("The answer (" + answer + ") cannot be added, because it is in the lower bound.");
347 soundAnswerTuples.add(answer); 379 soundAnswerTuples.add(answer);
348 } 380 }
349 } 381 }
350 382
351 public int getNoOfSoundAnswers() { 383 public int getNoOfSoundAnswers() {
352 return soundAnswerTuples.size(); 384 return soundAnswerTuples.size();
353 } 385 }
354
355 public enum Step {LowerBound, UpperBound, ELLowerBound,
356 Fragment, FragmentRefinement, Summarisation, Dependency, FullReasoning};
357
358 double[] timer;
359 386
360 public void addProcessingTime(Step step, double time) { 387 public void addProcessingTime(Step step, double time) {
361 timer[step.ordinal()] += time; 388 timer[step.ordinal()] += time;
362 } 389 }
363 390
364 public int getArity() { 391 public int getArity() {
365 return answerVariables[0].length; 392 return answerVariables[0].length;
366 } 393 }
367
368 public static Collection<String> collectQueryTexts(Collection<QueryRecord> queryRecords) {
369 Collection<String> texts = new LinkedList<String>();
370 for (QueryRecord record: queryRecords)
371 texts.add(record.queryText);
372 return texts;
373 }
374 394
375 public void addRelevantClauses(DLClause clause) { 395 public void addRelevantClauses(DLClause clause) {
376 relevantClauses.add(clause); 396 relevantClauses.add(clause);
377 } 397 }
378 398
379 public Set<DLClause> getRelevantClauses() { 399 public Set<DLClause> getRelevantClauses() {
380 return relevantClauses; 400 return relevantClauses;
381 } 401 }
382 402
383 public void clearClauses() { 403 public void clearClauses() {
@@ -388,36 +408,40 @@ public class QueryRecord {
388 for (DLClause clause: relevantClauses) 408 for (DLClause clause: relevantClauses)
389 if (clause.getHeadLength() > 1) 409 if (clause.getHeadLength() > 1)
390 return false; 410 return false;
391 return true; 411 return true;
392 } 412 }
393 413
394 public void saveABoxInTurtle(String filename) { 414 public void saveABoxInTurtle(String filename) {
395 try { 415 try {
396 BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filename))); 416 BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(filename)));
397 OWLIndividual a, b; 417 OWLIndividual a, b;
398 StringBuilder builder = new StringBuilder(); 418 StringBuilder builder = new StringBuilder();
399 for (OWLAxiom axiom: relevantOntology.getABoxAxioms(true)) { 419 for (OWLAxiom axiom: relevantOntology.getABoxAxioms(true)) {
400 if (axiom instanceof OWLClassAssertionAxiom) { 420 if (axiom instanceof OWLClassAssertionAxiom) {
401 OWLClassAssertionAxiom classAssertion = (OWLClassAssertionAxiom) axiom; 421 OWLClassAssertionAxiom classAssertion = (OWLClassAssertionAxiom) axiom;
402 OWLClass c = (OWLClass) classAssertion.getClassExpression(); 422 OWLClass c = (OWLClass) classAssertion.getClassExpression();
403 a = classAssertion.getIndividual(); 423 a = classAssertion.getIndividual();
404 builder.append(a.toString()).append(" <").append(Namespace.RDF_TYPE).append("> ").append(c.toString()); 424 builder.append(a.toString())
425 .append(" <")
426 .append(Namespace.RDF_TYPE)
427 .append("> ")
428 .append(c.toString());
405 } 429 }
406 else if (axiom instanceof OWLObjectPropertyAssertionAxiom) { 430 else if (axiom instanceof OWLObjectPropertyAssertionAxiom) {
407 OWLObjectPropertyAssertionAxiom propertyAssertion = (OWLObjectPropertyAssertionAxiom) axiom; 431 OWLObjectPropertyAssertionAxiom propertyAssertion = (OWLObjectPropertyAssertionAxiom) axiom;
408 OWLObjectProperty p = (OWLObjectProperty) propertyAssertion.getProperty(); 432 OWLObjectProperty p = (OWLObjectProperty) propertyAssertion.getProperty();
409 a = propertyAssertion.getSubject(); 433 a = propertyAssertion.getSubject();
410 b = propertyAssertion.getObject(); 434 b = propertyAssertion.getObject();
411 builder.append(a.toString()).append(" ").append(p.toString()).append(" ").append(b.toString()); 435 builder.append(a.toString()).append(" ").append(p.toString()).append(" ").append(b.toString());
412 } 436 }
413 else if (axiom instanceof OWLDataPropertyAssertionAxiom) { 437 else if (axiom instanceof OWLDataPropertyAssertionAxiom) {
414 OWLDataPropertyAssertionAxiom propertyAssertion = (OWLDataPropertyAssertionAxiom) axiom; 438 OWLDataPropertyAssertionAxiom propertyAssertion = (OWLDataPropertyAssertionAxiom) axiom;
415 OWLDataProperty p = (OWLDataProperty) propertyAssertion.getProperty(); 439 OWLDataProperty p = (OWLDataProperty) propertyAssertion.getProperty();
416 a = propertyAssertion.getSubject(); 440 a = propertyAssertion.getSubject();
417 OWLLiteral l = propertyAssertion.getObject(); 441 OWLLiteral l = propertyAssertion.getObject();
418 builder.append(a.toString()).append(" ").append(p.toString()).append(" ").append(l.toString()); 442 builder.append(a.toString()).append(" ").append(p.toString()).append(" ").append(l.toString());
419 } 443 }
420 444
421 writer.write(builder.toString()); 445 writer.write(builder.toString());
422 writer.write(" ."); 446 writer.write(" .");
423 writer.newLine(); 447 writer.newLine();
@@ -427,23 +451,19 @@ public class QueryRecord {
427 } catch (IOException e) { 451 } catch (IOException e) {
428 e.printStackTrace(); 452 e.printStackTrace();
429 } finally { 453 } finally {
430 454
431 } 455 }
432 } 456 }
433
434 int subID;
435 457
436 public void updateSubID() { 458 public void updateSubID() {
437 ++subID; 459 ++subID;
438 stringQueryID = String.valueOf(queryID) + "_" + subID; 460 stringQueryID = String.valueOf(queryID) + "_" + subID;
439 } 461 }
440
441 DLClause queryClause = null;
442 462
443 public DLClause getClause() { 463 public DLClause getClause() {
444 if (queryClause != null) 464 if (queryClause != null)
445 return queryClause; 465 return queryClause;
446 return queryClause = DLClauseHelper.getQuery(queryText, null); 466 return queryClause = DLClauseHelper.getQuery(queryText, null);
447 } 467 }
448 468
449 public boolean isBottom() { 469 public boolean isBottom() {
@@ -453,19 +473,19 @@ public class QueryRecord {
453 public int getNoOfCompleteAnswers() { 473 public int getNoOfCompleteAnswers() {
454 return soundAnswerTuples.size() + gapAnswerTuples.size(); 474 return soundAnswerTuples.size() + gapAnswerTuples.size();
455 } 475 }
456 476
457 public int getSubID() { 477 public int getSubID() {
458 return subID; 478 return subID;
459 } 479 }
460 480
461 public boolean hasSameGapAnswers(QueryRecord that) { 481 public boolean hasSameGapAnswers(QueryRecord that) {
462 return gapAnswerTuples.containsAll(that.gapAnswerTuples) && that.gapAnswerTuples.containsAll(gapAnswerTuples); 482 return gapAnswerTuples.containsAll(that.gapAnswerTuples) && that.gapAnswerTuples.containsAll(gapAnswerTuples);
463 } 483 }
464 484
465 public void dispose() { 485 public void dispose() {
466 m_manager.remove(queryText); 486 m_manager.remove(queryText);
467 if (gapAnswerTuples != null) gapAnswerTuples = null; 487 if(gapAnswerTuples != null) gapAnswerTuples = null;
468 if (soundAnswerTuples != null) soundAnswerTuples = null; 488 if(soundAnswerTuples != null) soundAnswerTuples = null;
469 if (relevantClauses != null) relevantClauses.clear(); 489 if (relevantClauses != null) relevantClauses.clear();
470 if (relevantOntology != null) 490 if (relevantOntology != null)
471 relevantOntology.getOWLOntologyManager().removeOntology(relevantOntology); 491 relevantOntology.getOWLOntologyManager().removeOntology(relevantOntology);
@@ -473,75 +493,85 @@ public class QueryRecord {
473 } 493 }
474 494
475 public boolean canBeEncodedIntoAtom() { 495 public boolean canBeEncodedIntoAtom() {
476 // FIXME 496 // FIXME
477 return true; 497 return true;
478// return false; 498// return false;
479 } 499 }
480 500
481 public boolean isPredicate(AnswerTuple a, int i) { 501 public boolean isPredicate(AnswerTuple a, int i) {
482 Atom[] atoms = getClause().getBodyAtoms(); 502 Atom[] atoms = getClause().getBodyAtoms();
483 Variable v = Variable.create(answerVariables[1][i]); 503 Variable v = Variable.create(answerVariables[1][i]);
484 String iri; 504 String iri;
485 for (Atom atom: atoms) { 505 for(Atom atom : atoms) {
486 DLPredicate p = atom.getDLPredicate(); 506 DLPredicate p = atom.getDLPredicate();
487 if (p instanceof AtomicConcept) { 507 if (p instanceof AtomicConcept) {
488 if (((AtomicConcept) p).getIRI().equals(v.toString())) return true; 508 if(((AtomicConcept) p).getIRI().equals(v.toString())) return true;
489 } 509 }
490 else if (p instanceof AtomicRole) { 510 else if (p instanceof AtomicRole) {
491 iri = ((AtomicRole) p).getIRI(); 511 iri = ((AtomicRole) p).getIRI();
492 if (iri.equals(v.toString())) return true; 512 if (iri.equals(v.toString())) return true;
493 if (iri.startsWith("?")) 513 if(iri.startsWith("?"))
494 iri = a.getGroundTerm(i).toString(); 514 iri = a.getGroundTerm(i).toString();
495 if (iri.equals(Namespace.RDF_TYPE) && atom.getArgument(1).equals(v)) return true; 515 if(iri.equals(Namespace.RDF_TYPE) && atom.getArgument(1).equals(v)) return true;
496 } 516 }
497 } 517 }
498 return false; 518 return false;
499 } 519 }
500 520
501 public String[] getExtendedQueryText() { 521 // TODO remove fully extended query
502 String[] ret = new String[2]; 522 public Tuple<String> getExtendedQueryText() {
503 int index = queryText.toUpperCase().indexOf(" WHERE"); 523// String[] ret = new String[2];s
504 String extendedSelect = queryText.substring(0, index); 524 int index = queryText.toUpperCase().indexOf(" WHERE");
525 String extendedSelect = queryText.substring(0, index);
505 String extendedWhere= queryText.substring(index + 1), fullyExtendedWhere = queryText.substring(index + 1); 526 String extendedWhere= queryText.substring(index + 1), fullyExtendedWhere = queryText.substring(index + 1);
506 527
507 String sub, obj; 528 String sub, obj;
508 Map<String, Set<String>> links = new HashMap<String, Set<String>>(); 529 Map<String, Set<String>> links = new HashMap<String, Set<String>>();
509 Set<String> list; 530 Set<String> list;
510 for (Atom atom: getClause().getBodyAtoms()) 531 for (Atom atom: getClause().getBodyAtoms())
511 if (atom.getDLPredicate() instanceof AtomicRole && atom.getArgument(0) instanceof Variable && atom.getArgument(1) instanceof Variable) { 532 if (atom.getDLPredicate() instanceof AtomicRole && atom.getArgument(0) instanceof Variable && atom.getArgument(1) instanceof Variable) {
512 sub = atom.getArgumentVariable(0).getName(); 533 sub = atom.getArgumentVariable(0).getName();
513 obj = atom.getArgumentVariable(1).getName(); 534 obj = atom.getArgumentVariable(1).getName();
514 if ((list = links.get(sub)) == null) 535 if((list = links.get(sub)) == null)
515 links.put(sub, list = new HashSet<String>()); 536 links.put(sub, list = new HashSet<String>());
516 list.add(obj); 537 list.add(obj);
517 if ((list = links.get(obj)) == null) 538 if((list = links.get(obj)) == null)
518 links.put(obj, list = new HashSet<String>()); 539 links.put(obj, list = new HashSet<String>());
519 list.add(sub); 540 list.add(sub);
520 } 541 }
521 542
522 StringBuilder extra = new StringBuilder(), fullyExtra = new StringBuilder(); 543 StringBuilder extra = new StringBuilder(), fullyExtra = new StringBuilder();
523// if (answerVariables[0] != answerVariables[1]) { 544// if (answerVariables[0] != answerVariables[1]) {
524 for (int i = answerVariables[0].length; i < answerVariables[1].length; ++i) { 545 for (int i = answerVariables[0].length; i < answerVariables[1].length; ++i) {
525// for (int i = 0; i < answerVariables[1].length; ++i) { 546// for (int i = 0; i < answerVariables[1].length; ++i) {
526 fullyExtra.append(" . ?").append(answerVariables[1][i]).append(" a <").append(Namespace.PAGODA_ORIGINAL).append(">"); 547 fullyExtra.append(" . ?")
548 .append(answerVariables[1][i])
549 .append(" " + RDF_TYPE + " <")
550 .append(Namespace.PAGODA_ORIGINAL)
551 .append(">");
527 if ((list = links.get(answerVariables[1][i])) == null || list.size() < 2) ; 552 if ((list = links.get(answerVariables[1][i])) == null || list.size() < 2) ;
528 else { 553 else {
529 extra.append(" . ?").append(answerVariables[1][i]).append(" a <").append(Namespace.PAGODA_ORIGINAL).append(">"); 554 extra.append(" . ?")
530 } 555 .append(answerVariables[1][i])
556 .append(" " + RDF_TYPE + " <")
557 .append(Namespace.PAGODA_ORIGINAL)
558 .append(">");
559 }
531 } 560 }
532 561
533 if (extra.length() > 0) { 562 if(extra.length() > 0) {
534 extra.append(" }"); 563 extra.append(" }");
535 extendedWhere = extendedWhere.replace(" }", extendedWhere.contains(". }") ? extra.substring(2) : extra.toString()); 564 extendedWhere = extendedWhere.replace(" }", extendedWhere.contains(". }") ? extra.substring(2) : extra.toString());
536 } 565 }
537 566
538 if (fullyExtra.length() > 0) { 567 if(fullyExtra.length() > 0) {
539 fullyExtra.append(" }"); 568 fullyExtra.append(" }");
540 fullyExtendedWhere = fullyExtendedWhere.replace(" }", fullyExtendedWhere.contains(". }") ? fullyExtra.substring(2) : fullyExtra.toString()); 569 fullyExtendedWhere = fullyExtendedWhere.replace(" }", fullyExtendedWhere.contains(". }") ? fullyExtra.substring(2) : fullyExtra.toString());
541 } 570 }
542// } 571// }
543 572
544 ret[0] = extendedSelect + " " + fullyExtendedWhere; 573 TupleBuilder result = new TupleBuilder();
574 result.append(extendedSelect + " " + fullyExtendedWhere);
545 575
546 extra.setLength(0); 576 extra.setLength(0);
547 if (answerVariables[0] != answerVariables[1]) { 577 if (answerVariables[0] != answerVariables[1]) {
@@ -549,14 +579,125 @@ public class QueryRecord {
549 extra.append(" ?").append(answerVariables[1][i]); 579 extra.append(" ?").append(answerVariables[1][i]);
550 extendedSelect = extendedSelect + extra.toString(); 580 extendedSelect = extendedSelect + extra.toString();
551 } 581 }
552 ret[1] = extendedSelect + " " + extendedWhere; 582 result.append(extendedSelect + " " + extendedWhere);
553 583
554 return ret; 584 return result.build();
555 } 585 }
556 586
557 public boolean hasNonAnsDistinguishedVariables() { 587 public boolean hasNonAnsDistinguishedVariables() {
558 return answerVariables[1].length > answerVariables[0].length; 588 return answerVariables[1].length > answerVariables[0].length;
559 } 589 }
560
561 590
591 /**
592 * Two <tt>QueryRecords</tt> are equal iff
593 * they have the same <tt>queryText</tt>,
594 * <tt>soundAnswerTuples</tt>.
595 */
596 @Override
597 public boolean equals(Object o) {
598 if(!o.getClass().equals(getClass())) return false;
599 QueryRecord that = (QueryRecord) o;
600 return this.queryText.equals(that.queryText)
601 && soundAnswerTuples.equals(that.soundAnswerTuples);
602 }
603
604 @Override
605 public int hashCode() {
606 return Objects.hash(queryText, soundAnswerTuples);
607 }
608
609 public enum Step {
610 LOWER_BOUND,
611 UPPER_BOUND,
612 SIMPLE_UPPER_BOUND,
613 LAZY_UPPER_BOUND,
614 L_SKOLEM_UPPER_BOUND,
615 EL_LOWER_BOUND,
616 FRAGMENT,
617 FRAGMENT_REFINEMENT,
618 SUMMARISATION,
619 DEPENDENCY,
620 FULL_REASONING
621 }
622
623 /**
624 * A Json serializer, which considers the main attributes.
625 */
626 public static class QueryRecordSerializer implements JsonSerializer<QueryRecord> {
627 public JsonElement serialize(QueryRecord src, Type typeOfSrc, JsonSerializationContext context) {
628 Gson gson = new GsonBuilder().setPrettyPrinting().create();
629 JsonObject object = new JsonObject();
630 object.addProperty("queryID", src.queryID);
631 object.addProperty("queryText", src.queryText);
632// object.addProperty("difficulty", src.difficulty != null ? src.difficulty.toString() : "");
633
634 object.add("answerVariables", context.serialize(src.getAnswerVariables()));
635 object.add("answers", context.serialize(src.soundAnswerTuples));
636// object.add("gapAnswers", context.serialize(src.gapAnswerTuples));
637
638 return object;
639 }
640 }
641
642 /**
643 * A Json deserializer, compliant to the output of the serializer defined above.
644 */
645 public static class QueryRecordDeserializer implements JsonDeserializer<QueryRecord> {
646 public QueryRecord deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
647 throws JsonParseException {
648
649 QueryRecord record = new QueryRecord();
650 JsonObject object = json.getAsJsonObject();
651 record.queryID = object.getAsJsonPrimitive("queryID").getAsInt();
652 record.queryText = object.getAsJsonPrimitive("queryText").getAsString();
653// record.difficulty = Step.valueOf(object.getAsJsonPrimitive("difficulty").getAsString());
654
655 JsonArray answerVariablesJson = object.getAsJsonArray("answerVariables");
656 record.answerVariables = new String[2][];
657 record.answerVariables[0] = new String[answerVariablesJson.size()];
658 for(int i = 0; i < answerVariablesJson.size(); i++)
659 record.answerVariables[0][i] = answerVariablesJson.get(i).getAsString();
660
661 record.soundAnswerTuples = new HashSet<>();
662// record.gapAnswerTuples = new HashSet<>();
663 Type type = new TypeToken<AnswerTuple>() { }.getType();
664 for (JsonElement answer : object.getAsJsonArray("answers")) {
665 record.soundAnswerTuples.add(context.deserialize(answer, type));
666 }
667// for (JsonElement answer : object.getAsJsonArray("gapAnswers")) {
668// record.soundAnswerTuples.add(context.deserialize(answer, type));
669// }
670
671 return record;
672 }
673 }
674
675 /**
676 * Provides an instance (singleton) of Gson, having a specific configuration.
677 * */
678 public static class GsonCreator {
679
680 private static Gson gson;
681
682 private GsonCreator() {}
683
684 public static Gson getInstance() {
685 if(gson == null) {
686 gson = new GsonBuilder()
687 .registerTypeAdapter(AnswerTuple.class, new AnswerTuple.AnswerTupleSerializer())
688 .registerTypeAdapter(QueryRecord.class, new QueryRecord.QueryRecordSerializer())
689 .registerTypeAdapter(QueryRecord.class, new QueryRecord.QueryRecordDeserializer())
690 .registerTypeAdapter(AnswerTuple.class, new AnswerTuple.AnswerTupleDeserializer())
691 .disableHtmlEscaping()
692 .setPrettyPrinting()
693 .create();
694 }
695 return gson;
696 }
697
698// public static void dispose() {
699// gson = null;
700// }
701
702 }
562} 703}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java
index 9b862ce..ef9338a 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java
@@ -1,7 +1,5 @@
1package uk.ac.ox.cs.pagoda.reasoner; 1package uk.ac.ox.cs.pagoda.reasoner;
2 2
3import java.util.LinkedList;
4
5import org.semanticweb.HermiT.model.Atom; 3import org.semanticweb.HermiT.model.Atom;
6import org.semanticweb.HermiT.model.AtomicConcept; 4import org.semanticweb.HermiT.model.AtomicConcept;
7import org.semanticweb.HermiT.model.DLClause; 5import org.semanticweb.HermiT.model.DLClause;
@@ -9,141 +7,146 @@ import org.semanticweb.HermiT.model.Variable;
9import org.semanticweb.owlapi.model.OWLOntology; 7import org.semanticweb.owlapi.model.OWLOntology;
10import org.semanticweb.owlapi.model.OWLOntologyCreationException; 8import org.semanticweb.owlapi.model.OWLOntologyCreationException;
11import org.semanticweb.owlapi.model.OWLOntologyManager; 9import org.semanticweb.owlapi.model.OWLOntologyManager;
12 10import uk.ac.ox.cs.JRDFox.JRDFStoreException;
11import uk.ac.ox.cs.JRDFox.store.DataStore;
12import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
13import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; 13import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
14import uk.ac.ox.cs.pagoda.query.AnswerTuples; 14import uk.ac.ox.cs.pagoda.query.AnswerTuples;
15import uk.ac.ox.cs.pagoda.query.QueryManager; 15import uk.ac.ox.cs.pagoda.query.QueryManager;
16import uk.ac.ox.cs.pagoda.query.QueryRecord; 16import uk.ac.ox.cs.pagoda.query.QueryRecord;
17import uk.ac.ox.cs.pagoda.reasoner.full.Checker; 17import uk.ac.ox.cs.pagoda.reasoner.full.Checker;
18import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
18import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; 19import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram;
19import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter; 20import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter;
20import uk.ac.ox.cs.pagoda.tracking.QueryTracker; 21import uk.ac.ox.cs.pagoda.tracking.QueryTracker;
21import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder; 22import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder;
22import uk.ac.ox.cs.pagoda.util.Timer; 23import uk.ac.ox.cs.pagoda.util.Timer;
23import uk.ac.ox.cs.pagoda.util.Utility; 24import uk.ac.ox.cs.pagoda.util.Utility;
24import uk.ac.ox.cs.JRDFox.JRDFStoreException; 25
25import uk.ac.ox.cs.JRDFox.store.DataStore; 26import java.util.LinkedList;
26import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
27 27
28public class ConsistencyManager { 28public class ConsistencyManager {
29 29
30 protected MyQueryReasoner m_reasoner; 30 protected MyQueryReasoner m_reasoner;
31 protected QueryManager m_queryManager; 31 protected QueryManager m_queryManager;
32 32
33 Timer t = new Timer(); 33 Timer t = new Timer();
34 QueryRecord fullQueryRecord;
35 QueryRecord[] botQueryRecords;
36 LinkedList<DLClause> toAddClauses;
37 boolean fragmentExtracted = false;
34 38
35 public ConsistencyManager(MyQueryReasoner reasoner) { 39 public ConsistencyManager(MyQueryReasoner reasoner) {
36 m_reasoner = reasoner; 40 m_reasoner = reasoner;
37 m_queryManager = reasoner.getQueryManager(); 41 m_queryManager = reasoner.getQueryManager();
38 } 42 }
39 43
40 QueryRecord fullQueryRecord;
41 QueryRecord[] botQueryRecords;
42
43 LinkedList<DLClause> toAddClauses;
44
45 boolean checkRLLowerBound() { 44 boolean checkRLLowerBound() {
46 fullQueryRecord = m_queryManager.create(QueryRecord.botQueryText, 0); 45 fullQueryRecord = m_queryManager.create(QueryRecord.botQueryText, 0);
47 AnswerTuples iter = null; 46 AnswerTuples iter = null;
48 47
49 try { 48 try {
50 iter = m_reasoner.rlLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()); 49 iter = m_reasoner.rlLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables());
51 fullQueryRecord.updateLowerBoundAnswers(iter); 50 fullQueryRecord.updateLowerBoundAnswers(iter);
52 } finally { 51 } finally {
53 iter.dispose(); 52 iter.dispose();
54 } 53 }
55 54
56 if (fullQueryRecord.getNoOfSoundAnswers() > 0) { 55 if (fullQueryRecord.getNoOfSoundAnswers() > 0) {
57 Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple()); 56 Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple());
58 return unsatisfiability(t.duration()); 57 return false;
59 } 58 }
60 return true; 59 return true;
61 } 60 }
62 61
63 boolean checkELLowerBound() { 62 boolean checkELLowerBound() {
64 fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables())); 63 fullQueryRecord.updateLowerBoundAnswers(m_reasoner.elLowerStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()));
65 if (fullQueryRecord.getNoOfSoundAnswers() > 0) { 64 if (fullQueryRecord.getNoOfSoundAnswers() > 0) {
66 Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple()); 65 Utility.logInfo("Answers to bottom in the lower bound: ", fullQueryRecord.outputSoundAnswerTuple());
67 return unsatisfiability(t.duration()); 66 return true;
68 } 67 }
69 return true; 68 return true;
70 } 69 }
71 70
72 boolean checkLazyUpper() { 71 boolean checkUpper(BasicQueryEngine upperStore) {
73 if (m_reasoner.lazyUpperStore != null) { 72 if (upperStore != null) {
74 AnswerTuples tuples = null; 73 AnswerTuples tuples = null;
75 try { 74 try {
76 tuples = m_reasoner.lazyUpperStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()); 75 tuples = upperStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables());
77 if (!tuples.isValid()) { 76 if (!tuples.isValid()) {
78 Utility.logInfo("There are no contradictions derived in the lazy upper bound materialisation."); 77 Utility.logInfo("There are no contradictions derived in "+ upperStore.getName() +" materialisation.");
79 return satisfiability(t.duration()); 78 Utility.logDebug("The ontology and dataset is satisfiable.");
79 return true;
80 } 80 }
81 } 81 }
82 finally { 82 finally {
83 if (tuples != null) tuples.dispose(); 83 if (tuples != null) tuples.dispose();
84 } 84 }
85 } 85 }
86 return false; 86 return false;
87 } 87 }
88 88
89 void dispose() {
90 fullQueryRecord.dispose();
91 }
92
93// protected boolean unsatisfiability(double duration) {
94// fullQueryRecord.dispose();
95// Utility.logDebug("The ontology and dataset is unsatisfiable.");
96// return false;
97// }
98
99// protected boolean satisfiability(double duration) {
100// fullQueryRecord.dispose();
101// Utility.logDebug("The ontology and dataset is satisfiable.");
102// return true;
103// }
104
89 boolean check() { 105 boolean check() {
90// if (!checkRLLowerBound()) return false; 106// if (!checkRLLowerBound()) return false;
91// if (!checkELLowerBound()) return false; 107// if (!checkELLowerBound()) return false;
92// if (checkLazyUpper()) return true; 108// if (checkLazyUpper()) return true;
93 AnswerTuples iter = null; 109 AnswerTuples iter = null;
94 110
95 try { 111 try {
96 iter = m_reasoner.trackingStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables()); 112 iter =
113 m_reasoner.trackingStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables());
97 fullQueryRecord.updateUpperBoundAnswers(iter); 114 fullQueryRecord.updateUpperBoundAnswers(iter);
98 } finally { 115 } finally {
99 if (iter != null) iter.dispose(); 116 if(iter != null) iter.dispose();
100 } 117 }
101 118
102 if (fullQueryRecord.getNoOfCompleteAnswers() == 0) 119 if(fullQueryRecord.getNoOfCompleteAnswers() == 0)
103 return satisfiability(t.duration()); 120 return true;
104 121
105 extractBottomFragment(); 122 extractBottomFragment();
106 123
107 try { 124 try {
108 extractAxioms4Full(); 125 extractAxioms4Full();
109 } catch (OWLOntologyCreationException e) { 126 } catch(OWLOntologyCreationException e) {
110 e.printStackTrace(); 127 e.printStackTrace();
111 } 128 }
112// fullQueryRecord.saveRelevantClause(); 129// fullQueryRecord.saveRelevantClause();
113 130
114 boolean satisfiability; 131 boolean satisfiability;
115 132
116 Checker checker; 133 Checker checker;
117 for (QueryRecord r: getQueryRecords()) { 134 for(QueryRecord r : getQueryRecords()) {
118 // TODO to be removed ... 135 // TODO to be removed ...
119// r.saveRelevantOntology("bottom" + r.getQueryID() + ".owl"); 136// r.saveRelevantOntology("bottom" + r.getQueryID() + ".owl");
120 checker = new HermitSummaryFilter(r, true); // m_reasoner.factory.getSummarisedReasoner(r); 137 checker = new HermitSummaryFilter(r, true); // m_reasoner.factory.getSummarisedReasoner(r);
121 satisfiability = checker.isConsistent(); 138 satisfiability = checker.isConsistent();
122 checker.dispose(); 139 checker.dispose();
123 if (!satisfiability) return unsatisfiability(t.duration()); 140 if(!satisfiability) return false;
124 } 141 }
125
126// Checker checker = m_reasoner.factory.getSummarisedReasoner(fullQueryRecord);
127// boolean satisfiable = checker.isConsistent();
128// checker.dispose();
129// if (!satisfiable) return unsatisfiability(t.duration());
130
131 return satisfiability(t.duration());
132 }
133 142
134 protected boolean unsatisfiability(double duration) { 143// Checker checker = m_reasoner.factory.getSummarisedReasoner(fullQueryRecord);
135 fullQueryRecord.dispose(); 144// boolean satisfiable = checker.isConsistent();
136 Utility.logDebug("The ontology and dataset is unsatisfiable."); 145// checker.dispose();
137 return false; 146// if (!satisfiable) return unsatisfiability(t.duration());
138 }
139 147
140 protected boolean satisfiability(double duration) {
141 fullQueryRecord.dispose();
142 Utility.logDebug("The ontology and dataset is satisfiable.");
143 return true; 148 return true;
144 } 149 }
145
146 boolean fragmentExtracted = false;
147 150
148 public void extractBottomFragment() { 151 public void extractBottomFragment() {
149 if (fragmentExtracted) return ; 152 if (fragmentExtracted) return ;
@@ -174,7 +177,7 @@ public class ConsistencyManager {
174 int[] group = new int[number - 1]; 177 int[] group = new int[number - 1];
175 for (int i = 0; i < number - 1; ++i) group[i] = i; 178 for (int i = 0; i < number - 1; ++i) group[i] = i;
176 for (int i = 0; i < number - 1; ++i) 179 for (int i = 0; i < number - 1; ++i)
177 if (tempQueryRecords[i].processed()) tempQueryRecords[i].dispose(); 180 if(tempQueryRecords[i].isProcessed()) tempQueryRecords[i].dispose();
178 else if (group[i] == i) { 181 else if (group[i] == i) {
179 ++bottomNumber; 182 ++bottomNumber;
180 record = tempQueryRecords[i]; 183 record = tempQueryRecords[i];
@@ -188,8 +191,8 @@ public class ConsistencyManager {
188 int bottomCounter = 0; 191 int bottomCounter = 0;
189 botQueryRecords = new QueryRecord[bottomNumber]; 192 botQueryRecords = new QueryRecord[bottomNumber];
190 Variable X = Variable.create("X"); 193 Variable X = Variable.create("X");
191 for (int i = 0; i < number - 1; ++i) 194 for (int i = 0; i < number - 1; ++i)
192 if (!tempQueryRecords[i].processed()) 195 if(!tempQueryRecords[i].isProcessed())
193 if (group[i] == i) { 196 if (group[i] == i) {
194 botQueryRecords[bottomCounter] = record = tempQueryRecords[i]; 197 botQueryRecords[bottomCounter] = record = tempQueryRecords[i];
195 record.resetInfo(QueryRecord.botQueryText.replace("Nothing", "Nothing_final" + (++bottomCounter)), 0, group[i] = bottomCounter); 198 record.resetInfo(QueryRecord.botQueryText.replace("Nothing", "Nothing_final" + (++bottomCounter)), 0, group[i] = bottomCounter);
@@ -288,5 +291,6 @@ public class ConsistencyManager {
288 public QueryRecord[] getQueryRecords() { 291 public QueryRecord[] getQueryRecords() {
289 return botQueryRecords; 292 return botQueryRecords;
290 } 293 }
291 294
295
292} 296}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java
deleted file mode 100644
index 9c335f3..0000000
--- a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager2.java
+++ /dev/null
@@ -1,78 +0,0 @@
1package uk.ac.ox.cs.pagoda.reasoner;
2
3import org.semanticweb.owlapi.model.OWLOntologyCreationException;
4import org.semanticweb.owlapi.model.OWLOntologyManager;
5
6import uk.ac.ox.cs.pagoda.query.AnswerTuples;
7import uk.ac.ox.cs.pagoda.query.QueryRecord;
8import uk.ac.ox.cs.pagoda.reasoner.full.Checker;
9import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter;
10import uk.ac.ox.cs.pagoda.tracking.QueryTracker;
11import uk.ac.ox.cs.pagoda.util.Utility;
12
13@Deprecated
14public class ConsistencyManager2 extends ConsistencyManager {
15
16 public ConsistencyManager2(MyQueryReasoner reasoner) {
17 super(reasoner);
18 fragmentExtracted = true;
19 }
20
21 protected boolean unsatisfiability(double duration) {
22 Utility.logDebug("The ontology and dataset is unsatisfiable.");
23 return false;
24 }
25
26 protected boolean satisfiability(double duration) {
27 Utility.logDebug("The ontology and dataset is satisfiable.");
28 return true;
29 }
30
31 @Override
32 boolean check() {
33// if (!checkRLLowerBound()) return false;
34// if (!checkELLowerBound()) return false;
35 if (checkLazyUpper()) return true;
36 AnswerTuples iter = null;
37
38 try {
39 iter = m_reasoner.trackingStore.evaluate(fullQueryRecord.getQueryText(), fullQueryRecord.getAnswerVariables());
40 fullQueryRecord.updateUpperBoundAnswers(iter);
41 } finally {
42 if (iter != null) iter.dispose();
43 }
44
45 if (fullQueryRecord.getNoOfCompleteAnswers() == 0)
46 return satisfiability(t.duration());
47
48 try {
49 extractAxioms();
50 } catch (OWLOntologyCreationException e) {
51 e.printStackTrace();
52 }
53
54 Checker checker = new HermitSummaryFilter(fullQueryRecord, true); // m_reasoner.factory.getSummarisedReasoner(fullQueryRecord);
55// fullQueryRecord.saveRelevantOntology("fragment_bottom.owl");
56 boolean satisfiable = checker.isConsistent();
57 checker.dispose();
58 if (!satisfiable) return unsatisfiability(t.duration());
59
60 return satisfiability(t.duration());
61 }
62
63 private void extractAxioms() throws OWLOntologyCreationException {
64 OWLOntologyManager manager = m_reasoner.encoder.getProgram().getOntology().getOWLOntologyManager();
65 fullQueryRecord.setRelevantOntology(manager.createOntology());
66 QueryTracker tracker = new QueryTracker(m_reasoner.encoder, m_reasoner.rlLowerStore, fullQueryRecord);
67 m_reasoner.encoder.setCurrentQuery(fullQueryRecord);
68 tracker.extract(m_reasoner.trackingStore, null, true);
69 }
70
71 @Override
72 public QueryRecord[] getQueryRecords() {
73 if (botQueryRecords == null)
74 botQueryRecords = new QueryRecord[] {fullQueryRecord};
75 return botQueryRecords;
76 }
77
78}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java
index ab57ccf..f5a8093 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOQueryReasoner.java
@@ -2,7 +2,6 @@ package uk.ac.ox.cs.pagoda.reasoner;
2 2
3import org.semanticweb.karma2.profile.ELHOProfile; 3import org.semanticweb.karma2.profile.ELHOProfile;
4import org.semanticweb.owlapi.model.OWLOntology; 4import org.semanticweb.owlapi.model.OWLOntology;
5
6import uk.ac.ox.cs.pagoda.constraints.UnaryBottom; 5import uk.ac.ox.cs.pagoda.constraints.UnaryBottom;
7import uk.ac.ox.cs.pagoda.query.AnswerTuples; 6import uk.ac.ox.cs.pagoda.query.AnswerTuples;
8import uk.ac.ox.cs.pagoda.query.QueryRecord; 7import uk.ac.ox.cs.pagoda.query.QueryRecord;
@@ -12,7 +11,7 @@ import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram;
12import uk.ac.ox.cs.pagoda.util.Timer; 11import uk.ac.ox.cs.pagoda.util.Timer;
13import uk.ac.ox.cs.pagoda.util.Utility; 12import uk.ac.ox.cs.pagoda.util.Utility;
14 13
15public class ELHOQueryReasoner extends QueryReasoner { 14class ELHOQueryReasoner extends QueryReasoner {
16 15
17 LowerDatalogProgram program; 16 LowerDatalogProgram program;
18 17
@@ -35,9 +34,9 @@ public class ELHOQueryReasoner extends QueryReasoner {
35 } finally { 34 } finally {
36 if (elAnswer != null) elAnswer.dispose(); 35 if (elAnswer != null) elAnswer.dispose();
37 } 36 }
38 queryRecord.addProcessingTime(Step.ELLowerBound, t.duration()); 37 queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration());
39 38
40 queryRecord.setDifficulty(Step.ELLowerBound); 39 queryRecord.setDifficulty(Step.EL_LOWER_BOUND);
41 queryRecord.markAsProcessed(); 40 queryRecord.markAsProcessed();
42 } 41 }
43 42
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java
index 0d24a02..c74ea58 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/ELHOUQueryReasoner.java
@@ -2,7 +2,6 @@ package uk.ac.ox.cs.pagoda.reasoner;
2 2
3import org.semanticweb.karma2.profile.ELHOProfile; 3import org.semanticweb.karma2.profile.ELHOProfile;
4import org.semanticweb.owlapi.model.OWLOntology; 4import org.semanticweb.owlapi.model.OWLOntology;
5
6import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; 5import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
7import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator; 6import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator;
8import uk.ac.ox.cs.pagoda.owl.OWLHelper; 7import uk.ac.ox.cs.pagoda.owl.OWLHelper;
@@ -15,7 +14,7 @@ import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
15import uk.ac.ox.cs.pagoda.util.Timer; 14import uk.ac.ox.cs.pagoda.util.Timer;
16import uk.ac.ox.cs.pagoda.util.Utility; 15import uk.ac.ox.cs.pagoda.util.Utility;
17 16
18public class ELHOUQueryReasoner extends QueryReasoner { 17class ELHOUQueryReasoner extends QueryReasoner {
19 18
20 DatalogProgram program; 19 DatalogProgram program;
21 20
@@ -26,81 +25,82 @@ public class ELHOUQueryReasoner extends QueryReasoner {
26 KarmaQueryEngine elLowerStore = null; 25 KarmaQueryEngine elLowerStore = null;
27 26
28 boolean multiStageTag, equalityTag; 27 boolean multiStageTag, equalityTag;
29 28 String originalMarkProgram;
29 private Timer t = new Timer();
30
30 public ELHOUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { 31 public ELHOUQueryReasoner(boolean multiStageTag, boolean considerEqualities) {
31 this.multiStageTag = multiStageTag; 32 this.multiStageTag = multiStageTag;
32 this.equalityTag = considerEqualities; 33 this.equalityTag = considerEqualities;
33 rlLowerStore = new BasicQueryEngine("rl-lower-bound"); 34 rlLowerStore = new BasicQueryEngine("rl-lower-bound");
34 elLowerStore = new KarmaQueryEngine("el-lower-bound"); 35 elLowerStore = new KarmaQueryEngine("el-lower-bound");
35 36
36 if (!multiStageTag) 37 if(!multiStageTag)
37 rlUpperStore = new BasicQueryEngine("rl-upper-bound"); 38 rlUpperStore = new BasicQueryEngine("rl-upper-bound");
38 else 39 else
39 rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); 40 rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false);
40 } 41 }
41
42 private Timer t = new Timer();
43 42
44 @Override 43 @Override
45 public void evaluate(QueryRecord queryRecord) { 44 public void evaluate(QueryRecord queryRecord) {
46 AnswerTuples rlAnswer = null; 45 AnswerTuples rlAnswer = null;
47 t.reset(); 46 t.reset();
48 try { 47 try {
49 rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); 48 rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
50 queryRecord.updateLowerBoundAnswers(rlAnswer); 49 queryRecord.updateLowerBoundAnswers(rlAnswer);
51 } finally { 50 } finally {
52 if (rlAnswer != null) rlAnswer.dispose(); 51 if(rlAnswer != null) rlAnswer.dispose();
53 } 52 }
54 queryRecord.addProcessingTime(Step.LowerBound, t.duration()); 53 queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration());
55 54
56 String extendedQueryText = queryRecord.getExtendedQueryText()[0]; 55 String extendedQueryText = queryRecord.getExtendedQueryText().get(0);
57 String[] toQuery = queryRecord.getQueryText().equals(extendedQueryText) ? 56 String[] toQuery = queryRecord.getQueryText().equals(extendedQueryText) ?
58 new String[] {queryRecord.getQueryText()} : 57 new String[]{queryRecord.getQueryText()} :
59 new String[] {queryRecord.getQueryText(), extendedQueryText}; 58 new String[] {queryRecord.getQueryText(), extendedQueryText};
60 59
61 for (String queryText: toQuery) { 60 for (String queryText: toQuery) {
62 rlAnswer = null; 61 rlAnswer = null;
63 t.reset(); 62 t.reset();
64 try { 63 try {
65 rlAnswer = rlUpperStore.evaluate(queryText, queryRecord.getAnswerVariables()); 64 rlAnswer = rlUpperStore.evaluate(queryText, queryRecord.getAnswerVariables());
66 queryRecord.updateUpperBoundAnswers(rlAnswer); 65 queryRecord.updateUpperBoundAnswers(rlAnswer);
67 } finally { 66 } finally {
68 if (rlAnswer != null) rlAnswer.dispose(); 67 if(rlAnswer != null) rlAnswer.dispose();
69 } 68 }
70 queryRecord.addProcessingTime(Step.UpperBound, t.duration()); 69 queryRecord.addProcessingTime(Step.UPPER_BOUND, t.duration());
71 70
72 if (queryRecord.processed()) { 71 if(queryRecord.isProcessed()) {
73 queryRecord.setDifficulty(Step.UpperBound); 72 queryRecord.setDifficulty(Step.UPPER_BOUND);
74 return ; 73 return;
75 } 74 }
76 } 75 }
77 76
78 AnswerTuples elAnswer = null; 77 AnswerTuples elAnswer = null;
79 t.reset(); 78 t.reset();
80 try { 79 try {
81 elAnswer = elLowerStore.evaluate(extendedQueryText, queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers()); 80 elAnswer =
81 elLowerStore.evaluate(extendedQueryText, queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers());
82 queryRecord.updateLowerBoundAnswers(elAnswer); 82 queryRecord.updateLowerBoundAnswers(elAnswer);
83 } finally { 83 } finally {
84 if (elAnswer != null) elAnswer.dispose(); 84 if (elAnswer != null) elAnswer.dispose();
85 } 85 }
86 queryRecord.addProcessingTime(Step.ELLowerBound, t.duration()); 86 queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration());
87 } 87 }
88 88
89 @Override 89 @Override
90 public void evaluateUpper(QueryRecord queryRecord) { 90 public void evaluateUpper(QueryRecord queryRecord) {
91 AnswerTuples rlAnswer = null; 91 AnswerTuples rlAnswer = null;
92 try { 92 try {
93 rlAnswer = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); 93 rlAnswer = rlUpperStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
94 queryRecord.updateUpperBoundAnswers(rlAnswer, true); 94 queryRecord.updateUpperBoundAnswers(rlAnswer, true);
95 } finally { 95 } finally {
96 if (rlAnswer != null) rlAnswer.dispose(); 96 if(rlAnswer != null) rlAnswer.dispose();
97 } 97 }
98 } 98 }
99 99
100 @Override 100 @Override
101 public void dispose() { 101 public void dispose() {
102 if (elLowerStore != null) elLowerStore.dispose(); 102 if (elLowerStore != null) elLowerStore.dispose();
103 if (rlUpperStore != null) rlUpperStore.dispose(); 103 if(rlUpperStore != null) rlUpperStore.dispose();
104 super.dispose(); 104 super.dispose();
105 } 105 }
106 106
@@ -110,19 +110,17 @@ public class ELHOUQueryReasoner extends QueryReasoner {
110 EqualitiesEliminator eliminator = new EqualitiesEliminator(o); 110 EqualitiesEliminator eliminator = new EqualitiesEliminator(o);
111 o = eliminator.getOutputOntology(); 111 o = eliminator.getOutputOntology();
112 eliminator.save(); 112 eliminator.save();
113 } 113 }
114
115 OWLOntology ontology = o;
116 program = new DatalogProgram(ontology, properties.getToClassify());
114 117
115 OWLOntology ontology = o;
116 program = new DatalogProgram(ontology, properties.getToClassify());
117
118 importData(program.getAdditionalDataFile()); 118 importData(program.getAdditionalDataFile());
119 119
120 elho_ontology = new ELHOProfile().getFragment(ontology); 120 elho_ontology = new ELHOProfile().getFragment(ontology);
121 elLowerStore.processOntology(elho_ontology); 121 elLowerStore.processOntology(elho_ontology);
122 originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); 122 originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology);
123 } 123 }
124
125 String originalMarkProgram;
126 124
127 @Override 125 @Override
128 public boolean preprocess() { 126 public boolean preprocess() {
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java
index 5511691..d1856c9 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java
@@ -1,32 +1,22 @@
1package uk.ac.ox.cs.pagoda.reasoner; 1package uk.ac.ox.cs.pagoda.reasoner;
2 2
3import java.io.File;
4import java.io.IOException;
5import java.util.HashSet;
6import java.util.Set;
7
8import org.semanticweb.HermiT.Reasoner; 3import org.semanticweb.HermiT.Reasoner;
9import org.semanticweb.owlapi.model.OWLClassExpression; 4import org.semanticweb.owlapi.model.*;
10import org.semanticweb.owlapi.model.OWLDataFactory;
11import org.semanticweb.owlapi.model.OWLNamedIndividual;
12import org.semanticweb.owlapi.model.OWLOntology;
13import org.semanticweb.owlapi.model.OWLOntologyCreationException;
14import org.semanticweb.owlapi.model.OWLOntologyStorageException;
15
16import uk.ac.ox.cs.JRDFox.model.Individual; 5import uk.ac.ox.cs.JRDFox.model.Individual;
17import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; 6import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
18import uk.ac.ox.cs.pagoda.owl.OWLHelper; 7import uk.ac.ox.cs.pagoda.owl.OWLHelper;
19import uk.ac.ox.cs.pagoda.owl.QueryRoller; 8import uk.ac.ox.cs.pagoda.owl.QueryRoller;
20import uk.ac.ox.cs.pagoda.query.AnswerTuple; 9import uk.ac.ox.cs.pagoda.query.*;
21import uk.ac.ox.cs.pagoda.query.AnswerTuples;
22import uk.ac.ox.cs.pagoda.query.AnswerTuplesImp;
23import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
24import uk.ac.ox.cs.pagoda.query.QueryRecord;
25import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; 10import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
26import uk.ac.ox.cs.pagoda.rules.DatalogProgram; 11import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
27import uk.ac.ox.cs.pagoda.util.Utility; 12import uk.ac.ox.cs.pagoda.util.Utility;
28 13
29public class HermiTReasoner extends QueryReasoner { 14import java.io.File;
15import java.io.IOException;
16import java.util.HashSet;
17import java.util.Set;
18
19class HermiTReasoner extends QueryReasoner {
30 20
31 Reasoner hermit; 21 Reasoner hermit;
32 22
@@ -54,15 +44,8 @@ public class HermiTReasoner extends QueryReasoner {
54 OWLOntology tbox = onto; 44 OWLOntology tbox = onto;
55 try { 45 try {
56 onto = OWLHelper.getImportedOntology(tbox, importedData.toString().split(ImportDataFileSeparator)); 46 onto = OWLHelper.getImportedOntology(tbox, importedData.toString().split(ImportDataFileSeparator));
57 importedOntologyPath = OWLHelper.getOntologyPath(onto); 47 importedOntologyPath = OWLHelper.getOntologyPath(onto);
58 } catch (OWLOntologyCreationException e) { 48 } catch(OWLOntologyCreationException | OWLOntologyStorageException | IOException e) {
59 // TODO Auto-generated catch block
60 e.printStackTrace();
61 } catch (OWLOntologyStorageException e) {
62 // TODO Auto-generated catch block
63 e.printStackTrace();
64 } catch (IOException e) {
65 // TODO Auto-generated catch block
66 e.printStackTrace(); 49 e.printStackTrace();
67 } 50 }
68 51
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java b/src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java
index 447a92d..7847e7c 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/IterativeRefinement.java
@@ -1,9 +1,6 @@
1package uk.ac.ox.cs.pagoda.reasoner; 1package uk.ac.ox.cs.pagoda.reasoner;
2 2
3import java.io.File;
4
5import org.semanticweb.owlapi.model.OWLOntology; 3import org.semanticweb.owlapi.model.OWLOntology;
6
7import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; 4import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
8import uk.ac.ox.cs.pagoda.constraints.UpperUnaryBottom; 5import uk.ac.ox.cs.pagoda.constraints.UpperUnaryBottom;
9import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; 6import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
@@ -14,6 +11,8 @@ import uk.ac.ox.cs.pagoda.rules.GeneralProgram;
14import uk.ac.ox.cs.pagoda.tracking.QueryTracker; 11import uk.ac.ox.cs.pagoda.tracking.QueryTracker;
15import uk.ac.ox.cs.pagoda.util.Utility; 12import uk.ac.ox.cs.pagoda.util.Utility;
16 13
14import java.io.File;
15
17public class IterativeRefinement { 16public class IterativeRefinement {
18 17
19 private static final int depthLimit = 1; 18 private static final int depthLimit = 1;
@@ -23,16 +22,15 @@ public class IterativeRefinement {
23 BasicQueryEngine m_trackingStore; 22 BasicQueryEngine m_trackingStore;
24 QueryRecord[] botQueryRecords; 23 QueryRecord[] botQueryRecords;
25 24
26 int m_depth = 0; 25 int m_depth = 0;
27 26 String tempDataFile = "temp.ttl";
27
28 public IterativeRefinement(QueryRecord queryRecord, QueryTracker tracker, BasicQueryEngine trackingStore, QueryRecord[] botQueryRecords) { 28 public IterativeRefinement(QueryRecord queryRecord, QueryTracker tracker, BasicQueryEngine trackingStore, QueryRecord[] botQueryRecords) {
29 m_record = queryRecord; 29 m_record = queryRecord;
30 m_tracker = tracker; 30 m_tracker = tracker;
31 m_trackingStore = trackingStore; 31 m_trackingStore = trackingStore;
32 this.botQueryRecords = botQueryRecords; 32 this.botQueryRecords = botQueryRecords;
33 } 33 }
34
35 String tempDataFile = "temp.ttl";
36 34
37 public OWLOntology extractWithFullABox(String dataset, BottomStrategy upperBottom) { 35 public OWLOntology extractWithFullABox(String dataset, BottomStrategy upperBottom) {
38 GeneralProgram program; 36 GeneralProgram program;
@@ -58,8 +56,8 @@ public class IterativeRefinement {
58 } finally { 56 } finally {
59 tEngine.dispose(); 57 tEngine.dispose();
60 } 58 }
61 59
62 if (m_record.processed()) 60 if(m_record.isProcessed())
63 return null; 61 return null;
64 62
65 if (!update) break; 63 if (!update) break;
@@ -95,8 +93,8 @@ public class IterativeRefinement {
95 } finally { 93 } finally {
96 tEngine.dispose(); 94 tEngine.dispose();
97 } 95 }
98 96
99 if (m_record.processed()) 97 if(m_record.isProcessed())
100 return null; 98 return null;
101 99
102 if (!update) break; 100 if (!update) break;
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
index 55ecb81..618fb70 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java
@@ -1,69 +1,75 @@
1package uk.ac.ox.cs.pagoda.reasoner; 1package uk.ac.ox.cs.pagoda.reasoner;
2 2
3import java.util.Collection;
4
5import org.semanticweb.karma2.profile.ELHOProfile; 3import org.semanticweb.karma2.profile.ELHOProfile;
6import org.semanticweb.owlapi.model.OWLOntology; 4import org.semanticweb.owlapi.model.OWLOntology;
7 5import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
8import uk.ac.ox.cs.pagoda.multistage.*;
9import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator; 6import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator;
10import uk.ac.ox.cs.pagoda.owl.OWLHelper; 7import uk.ac.ox.cs.pagoda.owl.OWLHelper;
11import uk.ac.ox.cs.pagoda.query.*; 8import uk.ac.ox.cs.pagoda.query.AnswerTuples;
9import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
10import uk.ac.ox.cs.pagoda.query.GapByStore4ID2;
11import uk.ac.ox.cs.pagoda.query.QueryRecord;
12import uk.ac.ox.cs.pagoda.query.QueryRecord.Step; 12import uk.ac.ox.cs.pagoda.query.QueryRecord.Step;
13import uk.ac.ox.cs.pagoda.reasoner.full.Checker; 13import uk.ac.ox.cs.pagoda.reasoner.full.Checker;
14import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; 14import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
15import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine; 15import uk.ac.ox.cs.pagoda.reasoner.light.KarmaQueryEngine;
16import uk.ac.ox.cs.pagoda.rules.DatalogProgram; 16import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
17import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter; 17import uk.ac.ox.cs.pagoda.summary.HermitSummaryFilter;
18import uk.ac.ox.cs.pagoda.tracking.*; 18import uk.ac.ox.cs.pagoda.tracking.QueryTracker;
19import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder;
20import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderDisjVar1;
21import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoderWithGap;
19import uk.ac.ox.cs.pagoda.util.Timer; 22import uk.ac.ox.cs.pagoda.util.Timer;
20import uk.ac.ox.cs.pagoda.util.Utility; 23import uk.ac.ox.cs.pagoda.util.Utility;
24import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
21 25
22public class MyQueryReasoner extends QueryReasoner { 26import java.util.Collection;
23 27
24 OWLOntology ontology; 28class MyQueryReasoner extends QueryReasoner {
25 29
26// String additonalDataFile; 30 OWLOntology ontology;
27 31 DatalogProgram program;
28 DatalogProgram program;
29 32
33// String additonalDataFile;
30 BasicQueryEngine rlLowerStore = null; 34 BasicQueryEngine rlLowerStore = null;
31 BasicQueryEngine lazyUpperStore = null; 35 BasicQueryEngine lazyUpperStore = null;
36 // BasicQueryEngine limitedSkolemUpperStore;
37 OWLOntology elho_ontology;
32// boolean[] namedIndividuals_lazyUpper; 38// boolean[] namedIndividuals_lazyUpper;
33 39 KarmaQueryEngine elLowerStore = null;
34 OWLOntology elho_ontology;
35 KarmaQueryEngine elLowerStore = null;
36
37 BasicQueryEngine trackingStore = null; 40 BasicQueryEngine trackingStore = null;
38// boolean[] namedIndividuals_tracking; 41 // boolean[] namedIndividuals_tracking;
39 42 TrackingRuleEncoder encoder;
40 boolean equalityTag; 43 private boolean equalityTag;
41 boolean multiStageTag; 44 private boolean multiStageTag;
42 45 private Timer t = new Timer();
46 private Collection<String> predicatesWithGap = null;
47 private SatisfiabilityStatus satisfiable;
48 private ConsistencyManager consistency = new ConsistencyManager(this);
49 private boolean useUpperStores = false;
43 public MyQueryReasoner() { 50 public MyQueryReasoner() {
44 setup(true, true); 51 setup(true, true);
45 } 52 }
46
47 public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) { 53 public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) {
48 setup(multiStageTag, considerEqualities); 54 setup(multiStageTag, considerEqualities);
49 } 55 }
50 56
51 private BasicQueryEngine getUpperStore(String name, boolean checkValidity) { 57 private BasicQueryEngine getUpperStore(String name, boolean checkValidity) {
52 if (multiStageTag) 58 if (multiStageTag)
53 return new MultiStageQueryEngine(name, checkValidity); 59 return new MultiStageQueryEngine(name, checkValidity);
54// return new TwoStageQueryEngine(name, checkValidity); 60// return new TwoStageQueryEngine(name, checkValidity);
55 else 61 else
56 return new BasicQueryEngine(name); 62 return new BasicQueryEngine(name);
57 } 63 }
58 64
59 public void setup(boolean multiStageTag, boolean considerEqualities) { 65 public void setup(boolean multiStageTag, boolean considerEqualities) {
60 satisfiable = null; 66 satisfiable = SatisfiabilityStatus.UNCHECKED;
61 this.multiStageTag = multiStageTag; 67 this.multiStageTag = multiStageTag;
62 this.equalityTag = considerEqualities; 68 this.equalityTag = considerEqualities;
63 69
64 rlLowerStore = new BasicQueryEngine("rl-lower-bound"); 70 rlLowerStore = new BasicQueryEngine("rl-lower-bound");
65 elLowerStore = new KarmaQueryEngine("elho-lower-bound"); 71 elLowerStore = new KarmaQueryEngine("elho-lower-bound");
66 72
67 trackingStore = getUpperStore("tracking", false); 73 trackingStore = getUpperStore("tracking", false);
68 } 74 }
69 75
@@ -75,119 +81,155 @@ public class MyQueryReasoner extends QueryReasoner {
75 elLowerStore.importRDFData(name, datafile); 81 elLowerStore.importRDFData(name, datafile);
76 trackingStore.importRDFData(name, datafile); 82 trackingStore.importRDFData(name, datafile);
77 } 83 }
78 84
79 @Override 85 @Override
80 public void loadOntology(OWLOntology o) { 86 public void loadOntology(OWLOntology o) {
81 if (!equalityTag) { 87 if(!equalityTag) {
82 EqualitiesEliminator eliminator = new EqualitiesEliminator(o); 88 EqualitiesEliminator eliminator = new EqualitiesEliminator(o);
83 o = eliminator.getOutputOntology(); 89 o = eliminator.getOutputOntology();
84 eliminator.save(); 90 eliminator.save();
85 } 91 }
86 92
87 ontology = o; 93 ontology = o;
88 program = new DatalogProgram(ontology, properties.getToClassify()); 94 program = new DatalogProgram(ontology, properties.getToClassify());
89// program.getLower().save(); 95// program.getLower().save();
90// program.getUpper().save(); 96// program.getUpper().save();
91// program.getGeneral().save(); 97// program.getGeneral().save();
92 98
93 if (multiStageTag && !program.getGeneral().isHorn()) { 99 useUpperStores = multiStageTag && !program.getGeneral().isHorn();
94 lazyUpperStore = getUpperStore("lazy-upper-bound", true); // new MultiStageQueryEngine("lazy-upper-bound", true); // 100 if(useUpperStores) {
101 lazyUpperStore = getUpperStore("lazy-upper-bound", true);
102// limitedSkolemUpperStore = getUpperStore("limited-skolem-upper-bound", true);
95 } 103 }
96 104
97 importData(program.getAdditionalDataFile()); 105 importData(program.getAdditionalDataFile());
98 106
99 elho_ontology = new ELHOProfile().getFragment(ontology); 107 elho_ontology = new ELHOProfile().getFragment(ontology);
100 elLowerStore.processOntology(elho_ontology); 108 elLowerStore.processOntology(elho_ontology);
101 } 109 }
102 110
103 private Collection<String> predicatesWithGap = null;
104
105 public Collection<String> getPredicatesWithGap() { 111 public Collection<String> getPredicatesWithGap() {
106 return predicatesWithGap; 112 return predicatesWithGap;
107 } 113 }
108 114
109 @Override 115 @Override
110 public boolean preprocess() { 116 public boolean preprocess() {
111 t.reset(); 117 t.reset();
112 Utility.logInfo("Preprocessing ... checking satisfiability ... "); 118 Utility.logInfo("Preprocessing... checking satisfiability... ");
113 119
114 String name = "data", datafile = importedData.toString(); 120 String name = "data", datafile = importedData.toString();
115 rlLowerStore.importRDFData(name, datafile); 121 rlLowerStore.importRDFData(name, datafile);
116 rlLowerStore.materialise("lower program", program.getLower().toString()); 122 rlLowerStore.materialise("lower program", program.getLower().toString());
117// program.getLower().save(); 123// program.getLower().save();
118 if (!consistency.checkRLLowerBound()) return false; 124 if(!consistency.checkRLLowerBound()) return false;
119 Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber()); 125 Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber());
120 126
121 String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); 127 String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology);
122 128
123 elLowerStore.importRDFData(name, datafile); 129 elLowerStore.importRDFData(name, datafile);
124 elLowerStore.materialise("saturate named individuals", originalMarkProgram); 130 elLowerStore.materialise("saturate named individuals", originalMarkProgram);
125 elLowerStore.materialise("lower program", program.getLower().toString()); 131 elLowerStore.materialise("lower program", program.getLower().toString());
126 elLowerStore.initialiseKarma(); 132 elLowerStore.initialiseKarma();
127 if (!consistency.checkELLowerBound()) return false; 133 if(!consistency.checkELLowerBound()) return false;
128 134
129 if (lazyUpperStore != null) { 135 if(lazyUpperStore != null) {
130 lazyUpperStore.importRDFData(name, datafile); 136 lazyUpperStore.importRDFData(name, datafile);
131 lazyUpperStore.materialise("saturate named individuals", originalMarkProgram); 137 lazyUpperStore.materialise("saturate named individuals", originalMarkProgram);
132 int tag = lazyUpperStore.materialiseRestrictedly(program, null); 138 int tag = lazyUpperStore.materialiseRestrictedly(program, null);
133 if (tag != 1) { 139 if(tag != 1) {
134 lazyUpperStore.dispose(); 140 lazyUpperStore.dispose();
135 lazyUpperStore = null; 141 lazyUpperStore = null;
136 } 142 }
137 if (tag == -1) return false; 143 if(tag == -1) return false;
138 } 144 }
139 if (consistency.checkLazyUpper()) { 145 if(consistency.checkUpper(lazyUpperStore)) {
140 satisfiable = true; 146 satisfiable = SatisfiabilityStatus.SATISFIABLE;
141 Utility.logInfo("time for satisfiability checking: " + t.duration()); 147 Utility.logInfo("time for satisfiability checking: " + t.duration());
142 } 148 }
143 149
150// if(limitedSkolemUpperStore != null) {
151// limitedSkolemUpperStore.importRDFData(name, datafile);
152// limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram);
153// int tag = limitedSkolemUpperStore.materialiseSkolemly(program, null);
154// if(tag != 1) {
155// limitedSkolemUpperStore.dispose();
156// limitedSkolemUpperStore = null;
157// }
158// if(tag == -1) return false;
159// }
160// if(satisfiable == SatisfiabilityStatus.UNCHECKED && consistency.checkUpper(limitedSkolemUpperStore)) {
161// satisfiable = SatisfiabilityStatus.SATISFIABLE;
162// Utility.logInfo("time for satisfiability checking: " + t.duration());
163// }
164
144 trackingStore.importRDFData(name, datafile); 165 trackingStore.importRDFData(name, datafile);
145 trackingStore.materialise("saturate named individuals", originalMarkProgram); 166 trackingStore.materialise("saturate named individuals", originalMarkProgram);
146 167
147// materialiseFullUpper(); 168// materialiseFullUpper();
148// GapByStore4ID gap = new GapByStore4ID(trackingStore); 169// GapByStore4ID gap = new GapByStore4ID(trackingStore);
149 GapByStore4ID gap = new GapByStore4ID2(trackingStore, rlLowerStore); 170 GapByStore4ID gap = new GapByStore4ID2(trackingStore, rlLowerStore);
150 trackingStore.materialiseFoldedly(program, gap); 171 trackingStore.materialiseFoldedly(program, gap);
151 predicatesWithGap = gap.getPredicatesWithGap(); 172 predicatesWithGap = gap.getPredicatesWithGap();
152 gap.clear(); 173 gap.clear();
153 174
154 if (program.getGeneral().isHorn()) 175 if(program.getGeneral().isHorn())
155 encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore); 176 encoder = new TrackingRuleEncoderWithGap(program.getUpper(), trackingStore);
156 else 177 else
157 encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore); 178 encoder = new TrackingRuleEncoderDisjVar1(program.getUpper(), trackingStore);
158// encoder = new TrackingRuleEncoderDisj1(program.getUpper(), trackingStore); 179// encoder = new TrackingRuleEncoderDisj1(program.getUpper(), trackingStore);
159// encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore); 180// encoder = new TrackingRuleEncoderDisjVar2(program.getUpper(), trackingStore);
160// encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore); 181// encoder = new TrackingRuleEncoderDisj2(program.getUpper(), trackingStore);
161 182
162 program.deleteABoxTurtleFile(); 183 program.deleteABoxTurtleFile();
163 184
164 if (!isConsistent()) 185 if(!isConsistent())
165 return false; 186 return false;
166 187
167 consistency.extractBottomFragment(); 188 consistency.extractBottomFragment();
168 return true; 189 consistency.dispose();
190
191 return true;
169 } 192 }
170 193
171 private Boolean satisfiable;
172 private ConsistencyManager consistency = new ConsistencyManager(this);
173
174 TrackingRuleEncoder encoder;
175
176 @Override 194 @Override
177 public boolean isConsistent() { 195 public boolean isConsistent() {
178 if (satisfiable == null) { 196 if(satisfiable == SatisfiabilityStatus.UNCHECKED) {
179 satisfiable = consistency.check(); 197 satisfiable = consistency.check() ? SatisfiabilityStatus.SATISFIABLE : SatisfiabilityStatus.UNSATISFIABLE;
180 Utility.logInfo("time for satisfiability checking: " + t.duration()); 198 Utility.logInfo("time for satisfiability checking: " + t.duration());
181 } 199 }
182 return satisfiable; 200 return satisfiable == SatisfiabilityStatus.SATISFIABLE;
201 }
202
203 /**
204 * It deals with blanks nodes differently from variables
205 * according to SPARQL semantics for OWL2 Entailment Regime.
206 * <p>
207 * In particular variables are matched only against named individuals,
208 * and blank nodes against named and anonymous individuals.
209 */
210 private boolean queryUpperStore(BasicQueryEngine upperStore, QueryRecord queryRecord,
211 Tuple<String> extendedQuery, Step step) {
212
213 if(queryRecord.hasNonAnsDistinguishedVariables())
214 queryUpperBound(upperStore, queryRecord, extendedQuery.get(0), queryRecord.getAnswerVariables());
215 else
216 queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables());
217
218 queryRecord.addProcessingTime(step, t.duration());
219 if(queryRecord.isProcessed()) {
220 queryRecord.setDifficulty(step);
221 return true;
222 }
223 return false;
183 } 224 }
184
185 Timer t = new Timer();
186 225
187 private OWLOntology relevantPart(QueryRecord queryRecord) { 226 /**
227 * Returns the part of the ontology relevant for Hermit, while computing the bound answers.
228 * */
229 private boolean queryBounds(QueryRecord queryRecord) {
188 AnswerTuples rlAnswer = null, elAnswer = null; 230 AnswerTuples rlAnswer = null, elAnswer = null;
189 231
190 t.reset(); 232 t.reset();
191 try { 233 try {
192 rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); 234 rlAnswer = rlLowerStore.evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
193 Utility.logDebug(t.duration()); 235 Utility.logDebug(t.duration());
@@ -195,130 +237,134 @@ public class MyQueryReasoner extends QueryReasoner {
195 } finally { 237 } finally {
196 if (rlAnswer != null) rlAnswer.dispose(); 238 if (rlAnswer != null) rlAnswer.dispose();
197 } 239 }
198 queryRecord.addProcessingTime(Step.LowerBound, t.duration()); 240 queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration());
199 rlAnswer = null; 241
200
201 t.reset(); 242 t.reset();
202 BasicQueryEngine upperStore = queryRecord.isBottom() || lazyUpperStore == null ? trackingStore : lazyUpperStore; 243
203 244 Tuple<String> extendedQueryTexts = queryRecord.getExtendedQueryText();
204 String[] extendedQuery = queryRecord.getExtendedQueryText(); 245
205 246 Utility.logDebug("Tracking store");
206 queryUpperBound(upperStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); 247 if(queryUpperStore(trackingStore, queryRecord, extendedQueryTexts, Step.SIMPLE_UPPER_BOUND))
207 if (!queryRecord.processed() && !queryRecord.getQueryText().equals(extendedQuery[0])) 248 return true;
208 queryUpperBound(upperStore, queryRecord, extendedQuery[0], queryRecord.getAnswerVariables()); 249
209 if (!queryRecord.processed() && queryRecord.hasNonAnsDistinguishedVariables()) 250 if(!queryRecord.isBottom()) {
210 queryUpperBound(upperStore, queryRecord, extendedQuery[1], queryRecord.getDistinguishedVariables()); 251 Utility.logDebug("Lazy store");
211 252 if(lazyUpperStore != null && queryUpperStore(lazyUpperStore, queryRecord, extendedQueryTexts, Step.LAZY_UPPER_BOUND))
212 queryRecord.addProcessingTime(Step.UpperBound, t.duration()); 253 return true;
213 if (queryRecord.processed()) { 254// Utility.logDebug("Skolem store");
214 queryRecord.setDifficulty(Step.UpperBound); 255// if(limitedSkolemUpperStore != null && queryUpperStore(limitedSkolemUpperStore, queryRecord, extendedQueryTexts, Step.L_SKOLEM_UPPER_BOUND))
215 return null; 256// return null;
216 } 257 }
217 258
218 t.reset(); 259 t.reset();
219 try { 260 try {
220 elAnswer = elLowerStore.evaluate(extendedQuery[0], queryRecord.getAnswerVariables(), queryRecord.getLowerBoundAnswers()); 261 elAnswer = elLowerStore.evaluate(extendedQueryTexts.get(0),
262 queryRecord.getAnswerVariables(),
263 queryRecord.getLowerBoundAnswers());
221 Utility.logDebug(t.duration()); 264 Utility.logDebug(t.duration());
222 queryRecord.updateLowerBoundAnswers(elAnswer); 265 queryRecord.updateLowerBoundAnswers(elAnswer);
223 } finally { 266 } finally {
224 if (elAnswer != null) elAnswer.dispose(); 267 if (elAnswer != null) elAnswer.dispose();
225 } 268 }
226 queryRecord.addProcessingTime(Step.ELLowerBound, t.duration()); 269 queryRecord.addProcessingTime(Step.EL_LOWER_BOUND, t.duration());
227 270
228 if (queryRecord.processed()) { 271 if(queryRecord.isProcessed()) {
229 queryRecord.setDifficulty(Step.ELLowerBound); 272 queryRecord.setDifficulty(Step.EL_LOWER_BOUND);
230 return null; 273 return true;
231 } 274 }
232 275
233 t.reset(); 276 return false;
234
235 QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord);
236
237 OWLOntology knowledgebase;
238 t.reset();
239// if (program.getGeneral().isHorn()) {
240// knowledgebase = tracker.extract(lazyUpperStore, consistency.getQueryRecords(), true);
241// queryRecord.addProcessingTime(Step.Fragment, t.duration());
242// return knowledgebase;
243// }
244// else {
245 knowledgebase = tracker.extract(trackingStore, consistency.getQueryRecords(), true);
246 queryRecord.addProcessingTime(Step.Fragment, t.duration());
247// }
248
249 if (knowledgebase.isEmpty() || queryRecord.isBottom())
250 return knowledgebase;
251
252 if (program.getGeneral().isHorn()) return knowledgebase;
253
254// t.reset();
255// if (queryRecord.isHorn() && lazyUpperStore != null) {
256//// knowledgebase = tracker.extract(lazyUpperStore, consistency.getQueryRecords(), true);
257// } else if (queryRecord.getArity() < 3) {
258// IterativeRefinement iterativeRefinement = new IterativeRefinement(queryRecord, tracker, trackingStore, consistency.getQueryRecords());
259// knowledgebase = iterativeRefinement.extractWithFullABox(importedData.toString(), program.getUpperBottomStrategy());
260// }
261//
262// queryRecord.addProcessingTime(Step.FragmentRefinement, t.duration());
263//
264// if (knowledgebase == null)
265// queryRecord.setDifficulty(Step.FragmentRefinement);
266
267 return knowledgebase;
268 } 277 }
269 278
270// int counter = 0; 279 private OWLOntology extractRelevantOntologySubset(QueryRecord queryRecord) {
280 t.reset();
281
282 QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord);
283 OWLOntology relevantOntologySubset = tracker.extract(trackingStore, consistency.getQueryRecords(), true);
284
285 queryRecord.addProcessingTime(Step.FRAGMENT, t.duration());
286
287 return relevantOntologySubset;
288 }
271 289
272 private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) { 290 private void queryUpperBound(BasicQueryEngine upperStore, QueryRecord queryRecord, String queryText, String[] answerVariables) {
273 AnswerTuples rlAnswer = null; 291 AnswerTuples rlAnswer = null;
274 try { 292 try {
275 Utility.logDebug(queryText); 293 Utility.logDebug(queryText);
276 rlAnswer = upperStore.evaluate(queryText, answerVariables); 294 rlAnswer = upperStore.evaluate(queryText, answerVariables);
277 Utility.logDebug(t.duration()); 295 Utility.logDebug(t.duration());
278 queryRecord.updateUpperBoundAnswers(rlAnswer); 296 queryRecord.updateUpperBoundAnswers(rlAnswer);
279 } finally { 297 } finally {
280 if (rlAnswer != null) rlAnswer.dispose(); 298 if(rlAnswer != null) rlAnswer.dispose();
281 rlAnswer = null;
282 } 299 }
283 } 300 }
284 301
285 @Override 302 @Override
286 public void evaluate(QueryRecord queryRecord) { 303 public void evaluate(QueryRecord queryRecord) {
287 OWLOntology knowledgebase = relevantPart(queryRecord); 304 if(queryBounds(queryRecord))
288 305 return;
289 if (knowledgebase == null) { 306
290 Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty()); 307 OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord);
291 return ; 308
292 } 309 int aBoxCount = relevantOntologySubset.getABoxAxioms(true).size();
293 310 Utility.logInfo("Relevant ontology subset: ABox_axioms=" + aBoxCount + " TBox_axioms=" + (relevantOntologySubset
294 int aboxcount = knowledgebase.getABoxAxioms(true).size(); 311 .getAxiomCount() - aBoxCount));
295 Utility.logDebug("ABox axioms: " + aboxcount + " TBox axioms: " + (knowledgebase.getAxiomCount() - aboxcount)); 312// queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl");
296// queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); 313
297 314 if(querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord))
298 Timer t = new Timer(); 315 return;
299 Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT()); 316
300// int validNumber = 317 Timer t = new Timer();
301 summarisedChecker.check(queryRecord.getGapAnswers()); 318 Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT());
319 summarisedChecker.check(queryRecord.getGapAnswers());
302 summarisedChecker.dispose(); 320 summarisedChecker.dispose();
303 Utility.logDebug("Total time for full reasoner: " + t.duration()); 321 Utility.logDebug("Total time for full reasoner: " + t.duration());
304// if (validNumber == 0) { 322 queryRecord.markAsProcessed();
305 queryRecord.markAsProcessed(); 323 Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
306 Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
307// }
308 } 324 }
309 325
326 private boolean querySkolemisedRelevantSubset(OWLOntology relevantSubset, QueryRecord queryRecord) {
327 MultiStageQueryEngine relevantStore =
328 new MultiStageQueryEngine("Relevant-store", true); // checkValidity is true
329 DatalogProgram relevantProgram = new DatalogProgram(relevantSubset, false); // toClassify is false
330
331// relevantStore.importRDFData("data", importedData.toString()); // 2 answers more
332 relevantStore.importDataFromABoxOf(relevantSubset);
333
334 int materialisationResult = relevantStore.materialiseSkolemly(relevantProgram, null);
335 if(materialisationResult != 1)
336 throw new RuntimeException("Skolemised materialisation error"); // TODO check consistency
337// relevantStore.materialiseRestrictedly(relevantProgram, null); // it has been tried
338
339 return queryUpperStore(relevantStore, queryRecord, queryRecord.getExtendedQueryText(), Step.L_SKOLEM_UPPER_BOUND);
340
341 // the following has been tried
342// Tuple<String> extendedQueryText = queryRecord.getExtendedQueryText();
343// if(queryRecord.hasNonAnsDistinguishedVariables()) {
344// queryUpperBound(relevantStore, queryRecord, extendedQueryText.get(0), queryRecord.getAnswerVariables());
345// queryUpperBound(relevantStore, queryRecord, extendedQueryText.get(1), queryRecord.getDistinguishedVariables());
346// }
347// else
348// queryUpperBound(relevantStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables());
349//
350// return queryRecord.isProcessed();
351
352 }
353
310 @Override 354 @Override
311 public void evaluateUpper(QueryRecord queryRecord) { 355 public void evaluateUpper(QueryRecord queryRecord) {
312 AnswerTuples rlAnswer = null; 356 // TODO add new upper store
313 boolean useFull = queryRecord.isBottom() || lazyUpperStore == null; 357 AnswerTuples rlAnswer = null;
358 boolean useFull = queryRecord.isBottom() || lazyUpperStore == null;
314 try { 359 try {
315 rlAnswer = (useFull ? trackingStore: lazyUpperStore).evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables()); 360 rlAnswer =
316 queryRecord.updateUpperBoundAnswers(rlAnswer, true); 361 (useFull ? trackingStore : lazyUpperStore).evaluate(queryRecord.getQueryText(), queryRecord.getAnswerVariables());
362 queryRecord.updateUpperBoundAnswers(rlAnswer, true);
317 } finally { 363 } finally {
318 if (rlAnswer != null) rlAnswer.dispose(); 364 if(rlAnswer != null) rlAnswer.dispose();
319 } 365 }
320 } 366 }
321 367
322 @Override 368 @Override
323 public void dispose() { 369 public void dispose() {
324 if (encoder != null) encoder.dispose(); 370 if (encoder != null) encoder.dispose();
@@ -326,7 +372,11 @@ public class MyQueryReasoner extends QueryReasoner {
326 if (lazyUpperStore != null) lazyUpperStore.dispose(); 372 if (lazyUpperStore != null) lazyUpperStore.dispose();
327 if (elLowerStore != null) elLowerStore.dispose(); 373 if (elLowerStore != null) elLowerStore.dispose();
328 if (trackingStore != null) trackingStore.dispose(); 374 if (trackingStore != null) trackingStore.dispose();
375
376// if(limitedSkolemUpperStore != null) limitedSkolemUpperStore.dispose();
329 super.dispose(); 377 super.dispose();
330 } 378 }
331 379
380 enum SatisfiabilityStatus {SATISFIABLE, UNSATISFIABLE, UNCHECKED}
381
332} 382}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
index f2b7251..118c1b2 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/QueryReasoner.java
@@ -1,42 +1,44 @@
1package uk.ac.ox.cs.pagoda.reasoner; 1package uk.ac.ox.cs.pagoda.reasoner;
2 2
3import java.io.BufferedWriter; 3import com.google.gson.Gson;
4import java.io.File;
5import java.io.FileNotFoundException;
6import java.io.FileOutputStream;
7import java.io.IOException;
8import java.io.OutputStreamWriter;
9import java.util.Collection;
10
11import org.semanticweb.owlapi.model.OWLOntology; 4import org.semanticweb.owlapi.model.OWLOntology;
12
13import uk.ac.ox.cs.pagoda.owl.OWLHelper; 5import uk.ac.ox.cs.pagoda.owl.OWLHelper;
14import uk.ac.ox.cs.pagoda.query.AnswerTuples; 6import uk.ac.ox.cs.pagoda.query.AnswerTuples;
15import uk.ac.ox.cs.pagoda.query.QueryManager; 7import uk.ac.ox.cs.pagoda.query.QueryManager;
16import uk.ac.ox.cs.pagoda.query.QueryRecord; 8import uk.ac.ox.cs.pagoda.query.QueryRecord;
17import uk.ac.ox.cs.pagoda.util.Properties; 9import uk.ac.ox.cs.pagoda.util.PagodaProperties;
18import uk.ac.ox.cs.pagoda.util.Timer; 10import uk.ac.ox.cs.pagoda.util.Timer;
19import uk.ac.ox.cs.pagoda.util.Utility; 11import uk.ac.ox.cs.pagoda.util.Utility;
20 12
13import java.io.BufferedWriter;
14import java.io.File;
15import java.io.IOException;
16import java.nio.file.Files;
17import java.nio.file.Paths;
18import java.util.Collection;
19
20// TODO clean APIs
21public abstract class QueryReasoner { 21public abstract class QueryReasoner {
22 22
23 public static final String ImportDataFileSeparator = ";";
24 private static final boolean DEFAULT_MULTI_STAGES = true;
25 private static final boolean DEFAULT_EQUALITIES = true;
26 public boolean fullReasoner = this instanceof MyQueryReasoner;
27 protected StringBuilder importedData = new StringBuilder();
23// protected boolean forSemFacet = false; 28// protected boolean forSemFacet = false;
24 Properties properties; 29PagodaProperties properties;
25 30 BufferedWriter answerWriter = null;
26 private static boolean defaultMultiStages = true; 31 private QueryManager m_queryManager = new QueryManager();
27 private static boolean defaultEqualities = true;
28 32
29 public static enum Type { Full, RLU, ELHOU }; 33 public static QueryReasoner getInstance(PagodaProperties p) {
30
31 public static QueryReasoner getInstance(Properties p) {
32 OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath()); 34 OWLOntology ontology = OWLHelper.loadOntology(p.getOntologyPath());
33 QueryReasoner pagoda = getInstance(ontology, p); 35 QueryReasoner pagoda = getInstance(ontology, p);
34 pagoda.properties = p; 36 pagoda.properties = p;
35 pagoda.loadOntology(ontology); 37 pagoda.loadOntology(ontology);
36 pagoda.importData(p.getDataPath()); 38 pagoda.importData(p.getDataPath());
37 if (pagoda.preprocess()) { 39 if (pagoda.preprocess()) {
38 Utility.logInfo("The ontology is consistent!"); 40 Utility.logInfo("The ontology is consistent!");
39 return pagoda; 41 return pagoda;
40 } 42 }
41 else { 43 else {
42 System.out.println("The ontology is inconsistent!"); 44 System.out.println("The ontology is inconsistent!");
@@ -44,60 +46,63 @@ public abstract class QueryReasoner {
44 return null; 46 return null;
45 } 47 }
46 } 48 }
47 49
48 public static QueryReasoner getInstance(OWLOntology o) { 50 public static QueryReasoner getInstance(OWLOntology o) {
49 QueryReasoner pagoda = getInstance(Type.Full, o, defaultMultiStages, defaultEqualities); 51 QueryReasoner pagoda = getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES);
50 pagoda.properties = new Properties(); 52 pagoda.properties = new PagodaProperties();
51 return pagoda; 53 return pagoda;
52 }
53
54 public void setToClassify(boolean flag) {
55 properties.setToClassify(flag);
56 }
57
58 public void setToCallHermiT(boolean flag) {
59 properties.setToCallHermiT(flag);
60 } 54 }
61 55
62 private static QueryReasoner getInstance(OWLOntology o, Properties p) { 56 private static QueryReasoner getInstance(OWLOntology o, PagodaProperties p) {
63 return getInstance(Type.Full, o, defaultMultiStages, defaultEqualities); 57 return getInstance(Type.Full, o, DEFAULT_MULTI_STAGES, DEFAULT_EQUALITIES);
64 } 58 }
65 59
66 public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) { 60 public static QueryReasoner getInstance(Type type, OWLOntology o, boolean performMultiStages, boolean considerEqualities) {
67 Utility.initialise(); 61// Utility.initialise();
68 QueryReasoner reasoner; 62 QueryReasoner reasoner;
69 if (OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner(); 63 if (OWLHelper.isInOWL2RL(o)) reasoner = new RLQueryReasoner();
70 else if (OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner(); 64 else if (OWLHelper.isInELHO(o)) reasoner = new ELHOQueryReasoner();
71 else 65 else
72 switch (type) { 66 switch (type) {
73 case RLU: 67 case RLU:
74 reasoner = new RLUQueryReasoner(performMultiStages, considerEqualities); break; 68 reasoner = new RLUQueryReasoner(performMultiStages, considerEqualities);
75 case ELHOU: 69 break;
76 reasoner = new ELHOUQueryReasoner(performMultiStages, considerEqualities); break; 70 case ELHOU:
77 default: 71 reasoner = new ELHOUQueryReasoner(performMultiStages, considerEqualities);
78 reasoner = new MyQueryReasoner(performMultiStages, considerEqualities); 72 break;
73 default:
74 reasoner = new MyQueryReasoner(performMultiStages, considerEqualities);
79 } 75 }
80 return reasoner; 76 return reasoner;
77 }
78
79 public static QueryReasoner getHermiTReasoner(boolean toCheckSatisfiability) {
80 return new HermiTReasoner(toCheckSatisfiability);
81 }
82
83 public void setToClassify(boolean flag) {
84 properties.setToClassify(flag);
85 }
86
87 public void setToCallHermiT(boolean flag) {
88 properties.setToCallHermiT(flag);
81 } 89 }
82
83 public static final String ImportDataFileSeparator = ";";
84 protected StringBuilder importedData = new StringBuilder();
85 90
86 public void importData(String datafile) { 91 public void importData(String datafile) {
87 if (datafile != null && !datafile.equalsIgnoreCase("null")) 92 if (datafile != null && !datafile.equalsIgnoreCase("null"))
88 importData(datafile.split(ImportDataFileSeparator)); 93 importData(datafile.split(ImportDataFileSeparator));
89 } 94 }
90 95
91 public void importData(String[] datafiles) { 96 public void importData(String[] datafiles) {
92 if (datafiles != null) { 97 if (datafiles != null) {
93 for (String datafile: datafiles) { 98 for (String datafile: datafiles) {
94 File file = new File(datafile); 99 File file = new File(datafile);
95 if (file.exists()) { 100 if (file.exists()) {
96 if (file.isFile()) importDataFile(file); 101 if (file.isFile()) importDataFile(file);
97 else importDataDirectory(file); 102 else importDataDirectory(file);
98 } 103 }
99 else { 104 else {
100 Utility.logError("warning: file " + datafile + " doesn't exists."); 105 Utility.logError("warning: file " + datafile + " doesn't exists.");
101 } 106 }
102 } 107 }
103 } 108 }
@@ -115,136 +120,128 @@ public abstract class QueryReasoner {
115 datafile = file.getCanonicalPath(); 120 datafile = file.getCanonicalPath();
116 } catch (IOException e) { 121 } catch (IOException e) {
117 e.printStackTrace(); 122 e.printStackTrace();
118 return ; 123 return;
119 } 124 }
120 importDataFile(datafile); 125 importDataFile(datafile);
121 } 126 }
122 127
123 protected final void importDataFile(String datafile) { 128 protected final void importDataFile(String datafile) {
124 if (importedData.length() == 0) 129 if (importedData.length() == 0)
125 importedData.append(datafile); 130 importedData.append(datafile);
126 else 131 else
127 importedData.append(ImportDataFileSeparator).append(datafile); 132 importedData.append(ImportDataFileSeparator).append(datafile);
128 133
129 } 134 }
130
131 public abstract void loadOntology(OWLOntology ontology);
132
133 public abstract boolean preprocess();
134 135
135 public abstract boolean isConsistent(); 136 public abstract void loadOntology(OWLOntology ontology);
136 137
137 public boolean fullReasoner = this instanceof MyQueryReasoner; 138 public abstract boolean preprocess();
138 139
139 public abstract void evaluate(QueryRecord record); 140 public abstract boolean isConsistent();
140 141
141 public abstract void evaluateUpper(QueryRecord record); 142 public abstract void evaluate(QueryRecord record);
143
144 public abstract void evaluateUpper(QueryRecord record);
142 145
143 public AnswerTuples evaluate(String queryText, boolean forFacetGeneration) { 146 public AnswerTuples evaluate(String queryText, boolean forFacetGeneration) {
144 if (forFacetGeneration) { 147 if (forFacetGeneration) {
145 QueryRecord record = m_queryManager.create(queryText); 148 QueryRecord record = m_queryManager.create(queryText);
146 Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText); 149 Utility.logInfo("---------- start evaluating upper bound for Query " + record.getQueryID() + " ----------", queryText);
147 if (!record.processed()) 150 if(!record.isProcessed())
148 evaluateUpper(record); 151 evaluateUpper(record);
149// AnswerTuples tuples = record.getUpperBoundAnswers(); 152// AnswerTuples tuples = record.getUpperBoundAnswers();
150// for (AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) { 153// for (AnswerTuple tuple; tuples.isValid(); tuples.moveNext()) {
151// tuple = tuples.getTuple(); 154// tuple = tuples.getTuple();
152// if (tuple.toString().contains("NC")) 155// if (tuple.toString().contains("NC"))
153// System.out.println(tuple.toString()); 156// System.out.println(tuple.toString());
154// } 157// }
155 return record.getUpperBoundAnswers(); 158 return record.getUpperBoundAnswers();
156 } 159 } else
157 else 160 return evaluate(queryText);
158 return evaluate(queryText);
159 } 161 }
160 162
163// public void evaluate(Collection<QueryRecord> queryRecords) {
164// evaluate(queryRecords);
165// }
166
161 public AnswerTuples evaluate(String queryText) { 167 public AnswerTuples evaluate(String queryText) {
162 QueryRecord record = m_queryManager.create(queryText); 168 QueryRecord record = m_queryManager.create(queryText);
163 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); 169 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText);
164 if (!record.processed()) 170 if(!record.isProcessed())
165 evaluate(record); 171 evaluate(record);
166 AnswerTuples answer = record.getAnswers(); 172 AnswerTuples answer = record.getAnswers();
167 record.dispose(); 173 record.dispose();
168 return answer; 174 return answer;
169 175
170 } 176 }
171 177
172 public void evaluate_shell(String queryText) { 178 public void evaluate_shell(String queryText) {
173 QueryRecord record = m_queryManager.create(queryText); 179 QueryRecord record = m_queryManager.create(queryText);
174 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText); 180 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", queryText);
175 if (!record.processed()) 181 if(!record.isProcessed())
176 evaluate(record); 182 evaluate(record);
177 Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple()); 183 Utility.logInfo("Answers to this query: ", record.outputSoundAnswerTuple());
178 record.dispose(); 184 record.dispose();
179 185
180 } 186 }
181 187
182 public void evaluate(Collection<QueryRecord> queryRecords) { 188 public void evaluate(Collection<QueryRecord> queryRecords) {
183 evaluate(queryRecords, null);
184 }
185
186 BufferedWriter answerWriter = null;
187
188 public void evaluate(Collection<QueryRecord> queryRecords, String answerFile) {
189 if (!isConsistent()) { 189 if (!isConsistent()) {
190 Utility.logDebug("The ontology and dataset is inconsistent."); 190 Utility.logDebug("The ontology and dataset is inconsistent.");
191 return ; 191 return;
192 } 192 }
193 193
194 if (answerWriter == null && answerFile != null) { 194 if(properties.getAnswerPath() != null && answerWriter == null) {
195 try { 195 try {
196 answerWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(answerFile))); 196 answerWriter = Files.newBufferedWriter(Paths.get(properties.getAnswerPath()));
197 } catch (FileNotFoundException e) { 197 } catch (IOException e) {
198 Utility.logInfo("The answer file not found! " + answerFile); 198 Utility.logError("The answer path is not valid!");
199 return ; 199 e.printStackTrace();
200 } 200 }
201 } 201 }
202 202
203 Timer t = new Timer(); 203 Timer t = new Timer();
204 Gson gson = QueryRecord.GsonCreator.getInstance();
204 for (QueryRecord record: queryRecords) { 205 for (QueryRecord record: queryRecords) {
205// if (Integer.parseInt(record.getQueryID()) != 218) continue; 206// if (Integer.parseInt(record.getQueryID()) != 218) continue;
206 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------", 207 Utility.logInfo("---------- start evaluating Query " + record.getQueryID() + " ----------",
207 record.getQueryText()); 208 record.getQueryText());
208 if (!record.processed()) { 209 if(!record.isProcessed()) {
209 t.reset(); 210 t.reset();
210 if (!record.processed()) 211 if(!record.isProcessed())
211 evaluate(record); 212 evaluate(record);
212 Utility.logInfo("Total time to answer this query: " + t.duration()); 213 Utility.logInfo("Total time to answer this query: " + t.duration());
213 if (!fullReasoner && !record.processed()) { 214 if(!fullReasoner && !record.isProcessed()) {
214 Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds."); 215 Utility.logInfo("The query has not been fully answered in " + t.duration() + " seconds.");
215 continue; 216 continue;
216 } 217 }
217 } 218 }
218 // FIXME: change the argument below 219 record.outputAnswerStatistics();
219 try {
220 record.outputAnswers(answerWriter);
221 } catch (IOException e) {
222 Utility.logInfo("Error in outputing answers " + answerFile);
223 }
224 record.outputTimes(); 220 record.outputTimes();
225 record.dispose();
226 } 221 }
222 /* TODO it can handle one call only
223 if you call twice, you will end up with a json file with multiple roots */
224 if(answerWriter != null) gson.toJson(queryRecords, answerWriter);
225// queryRecords.stream().forEach(record -> Utility.logDebug(gson.toJson(record)));
226 queryRecords.stream().forEach(record -> record.dispose());
227 } 227 }
228 228
229 public void dispose() { 229 public void dispose() {
230 if (answerWriter != null) 230 if (answerWriter != null) {
231 try { 231 try {
232 answerWriter.close(); 232 answerWriter.close();
233 } catch (IOException e) { 233 } catch (IOException e) {
234 e.printStackTrace(); 234 e.printStackTrace();
235 } 235 }
236 Utility.cleanup(); 236 }
237 } 237// Utility.cleanup();
238 238 }
239 private QueryManager m_queryManager = new QueryManager();
240 239
241 public QueryManager getQueryManager() { 240 public QueryManager getQueryManager() {
242 return m_queryManager; 241 return m_queryManager;
243 } 242 }
244 243
245 244
246 public static QueryReasoner getHermiTReasoner(boolean toCheckSatisfiability) { 245 public enum Type {Full, RLU, ELHOU}
247 return new HermiTReasoner(toCheckSatisfiability); 246
248 }
249
250} 247}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java
index 3894874..bea5bbf 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLQueryReasoner.java
@@ -10,18 +10,17 @@ import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxQueryEngine;
10import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram; 10import uk.ac.ox.cs.pagoda.rules.LowerDatalogProgram;
11import uk.ac.ox.cs.pagoda.util.Timer; 11import uk.ac.ox.cs.pagoda.util.Timer;
12 12
13public class RLQueryReasoner extends QueryReasoner { 13class RLQueryReasoner extends QueryReasoner {
14 14
15 RDFoxQueryEngine rlLowerStore = null; 15 RDFoxQueryEngine rlLowerStore = null;
16 16
17 LowerDatalogProgram program; 17 LowerDatalogProgram program;
18 Timer t = new Timer();
18 19
19 public RLQueryReasoner() { 20 public RLQueryReasoner() {
20 rlLowerStore = new BasicQueryEngine("rl"); 21 rlLowerStore = new BasicQueryEngine("rl");
21 } 22 }
22 23
23 Timer t = new Timer();
24
25 @Override 24 @Override
26 public void evaluate(QueryRecord queryRecord) { 25 public void evaluate(QueryRecord queryRecord) {
27 AnswerTuples rlAnswer = null; 26 AnswerTuples rlAnswer = null;
@@ -32,8 +31,8 @@ public class RLQueryReasoner extends QueryReasoner {
32 } finally { 31 } finally {
33 if (rlAnswer != null) rlAnswer.dispose(); 32 if (rlAnswer != null) rlAnswer.dispose();
34 } 33 }
35 queryRecord.addProcessingTime(Step.LowerBound, t.duration()); 34 queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration());
36 queryRecord.setDifficulty(Step.LowerBound); 35 queryRecord.setDifficulty(Step.LOWER_BOUND);
37 queryRecord.markAsProcessed(); 36 queryRecord.markAsProcessed();
38 } 37 }
39 38
@@ -56,10 +55,8 @@ public class RLQueryReasoner extends QueryReasoner {
56 public boolean preprocess() { 55 public boolean preprocess() {
57 rlLowerStore.importRDFData("data", importedData.toString()); 56 rlLowerStore.importRDFData("data", importedData.toString());
58 rlLowerStore.materialise("lower program", program.toString()); 57 rlLowerStore.materialise("lower program", program.toString());
59 58
60 if (!isConsistent()) 59 return isConsistent();
61 return false;
62 return true;
63 } 60 }
64 61
65 @Override 62 @Override
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java
index fe4022d..547140a 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/RLUQueryReasoner.java
@@ -1,7 +1,6 @@
1package uk.ac.ox.cs.pagoda.reasoner; 1package uk.ac.ox.cs.pagoda.reasoner;
2 2
3import org.semanticweb.owlapi.model.OWLOntology; 3import org.semanticweb.owlapi.model.OWLOntology;
4
5import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine; 4import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
6import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator; 5import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator;
7import uk.ac.ox.cs.pagoda.query.AnswerTuples; 6import uk.ac.ox.cs.pagoda.query.AnswerTuples;
@@ -12,26 +11,25 @@ import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
12import uk.ac.ox.cs.pagoda.util.Timer; 11import uk.ac.ox.cs.pagoda.util.Timer;
13import uk.ac.ox.cs.pagoda.util.Utility; 12import uk.ac.ox.cs.pagoda.util.Utility;
14 13
15public class RLUQueryReasoner extends QueryReasoner { 14class RLUQueryReasoner extends QueryReasoner {
16 15
17 DatalogProgram program; 16 DatalogProgram program;
18 17
19 BasicQueryEngine rlLowerStore, rlUpperStore; 18 BasicQueryEngine rlLowerStore, rlUpperStore;
20 19
21 boolean multiStageTag, equalityTag; 20 boolean multiStageTag, equalityTag;
21 Timer t = new Timer();
22 22
23 public RLUQueryReasoner(boolean multiStageTag, boolean considerEqualities) { 23 public RLUQueryReasoner(boolean multiStageTag, boolean considerEqualities) {
24 this.multiStageTag = multiStageTag; 24 this.multiStageTag = multiStageTag;
25 this.equalityTag = considerEqualities; 25 this.equalityTag = considerEqualities;
26 rlLowerStore = new BasicQueryEngine("rl-lower-bound"); 26 rlLowerStore = new BasicQueryEngine("rl-lower-bound");
27 if (!multiStageTag) 27 if(!multiStageTag)
28 rlUpperStore = new BasicQueryEngine("rl-upper-bound"); 28 rlUpperStore = new BasicQueryEngine("rl-upper-bound");
29 else 29 else
30 rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false); 30 rlUpperStore = new MultiStageQueryEngine("rl-upper-bound", false);
31 } 31 }
32 32
33 Timer t = new Timer();
34
35 @Override 33 @Override
36 public void evaluate(QueryRecord queryRecord) { 34 public void evaluate(QueryRecord queryRecord) {
37 AnswerTuples ans = null; 35 AnswerTuples ans = null;
@@ -43,7 +41,7 @@ public class RLUQueryReasoner extends QueryReasoner {
43 } finally { 41 } finally {
44 if (ans != null) ans.dispose(); 42 if (ans != null) ans.dispose();
45 } 43 }
46 queryRecord.addProcessingTime(Step.LowerBound, t.duration()); 44 queryRecord.addProcessingTime(Step.LOWER_BOUND, t.duration());
47 45
48 ans = null; 46 ans = null;
49 t.reset(); 47 t.reset();
@@ -54,10 +52,10 @@ public class RLUQueryReasoner extends QueryReasoner {
54 } finally { 52 } finally {
55 if (ans != null) ans.dispose(); 53 if (ans != null) ans.dispose();
56 } 54 }
57 queryRecord.addProcessingTime(Step.UpperBound, t.duration()); 55 queryRecord.addProcessingTime(Step.UPPER_BOUND, t.duration());
58 56
59 if (queryRecord.processed()) 57 if(queryRecord.isProcessed())
60 queryRecord.setDifficulty(Step.UpperBound); 58 queryRecord.setDifficulty(Step.UPPER_BOUND);
61 } 59 }
62 60
63 @Override 61 @Override
@@ -100,11 +98,9 @@ public class RLUQueryReasoner extends QueryReasoner {
100 98
101 rlUpperStore.importRDFData("data", datafile); 99 rlUpperStore.importRDFData("data", datafile);
102 rlUpperStore.materialiseRestrictedly(program, null); 100 rlUpperStore.materialiseRestrictedly(program, null);
103
104 if (!isConsistent())
105 return false;
106 101
107 return true; 102 return isConsistent();
103
108 } 104 }
109 105
110 @Override 106 @Override
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java
index c22902c..05e399e 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/DLPredicateComparator.java
@@ -1,9 +1,9 @@
1package uk.ac.ox.cs.pagoda.reasoner.light; 1package uk.ac.ox.cs.pagoda.reasoner.light;
2 2
3import java.util.Comparator;
4
5import uk.ac.ox.cs.pagoda.multistage.Normalisation; 3import uk.ac.ox.cs.pagoda.multistage.Normalisation;
6import uk.ac.ox.cs.pagoda.rules.OverApproxExist; 4import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
5
6import java.util.Comparator;
7 7
8public class DLPredicateComparator implements Comparator<String> { 8public class DLPredicateComparator implements Comparator<String> {
9 9
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java
index f70dde9..f068164 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/KarmaQueryEngine.java
@@ -1,20 +1,22 @@
1package uk.ac.ox.cs.pagoda.reasoner.light; 1package uk.ac.ox.cs.pagoda.reasoner.light;
2 2
3import java.io.File; 3import org.semanticweb.karma2.MyKarma;
4import java.io.FileNotFoundException;
5import java.util.*;
6
7import org.semanticweb.karma2.*;
8import org.semanticweb.karma2.clausifier.OntologyProcesser; 4import org.semanticweb.karma2.clausifier.OntologyProcesser;
9import org.semanticweb.karma2.exception.IllegalInputOntologyException; 5import org.semanticweb.karma2.exception.IllegalInputOntologyException;
10import org.semanticweb.karma2.model.ConjunctiveQuery; 6import org.semanticweb.karma2.model.ConjunctiveQuery;
11import org.semanticweb.owlapi.model.OWLOntology; 7import org.semanticweb.owlapi.model.OWLOntology;
12
13import uk.ac.ox.cs.pagoda.query.*;
14import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper;
15import uk.ac.ox.cs.pagoda.util.Utility;
16import uk.ac.ox.cs.JRDFox.JRDFStoreException; 8import uk.ac.ox.cs.JRDFox.JRDFStoreException;
17import uk.ac.ox.cs.JRDFox.store.DataStore; 9import uk.ac.ox.cs.JRDFox.store.DataStore;
10import uk.ac.ox.cs.pagoda.query.AnswerTuple;
11import uk.ac.ox.cs.pagoda.query.AnswerTuples;
12import uk.ac.ox.cs.pagoda.query.AnswerTuplesImp;
13import uk.ac.ox.cs.pagoda.util.ConjunctiveQueryHelper;
14import uk.ac.ox.cs.pagoda.util.Utility;
15
16import java.io.File;
17import java.io.FileNotFoundException;
18import java.nio.file.Paths;
19import java.util.Set;
18 20
19public class KarmaQueryEngine extends RDFoxQueryEngine { 21public class KarmaQueryEngine extends RDFoxQueryEngine {
20 22
@@ -29,8 +31,8 @@ public class KarmaQueryEngine extends RDFoxQueryEngine {
29// int index = (new Random().nextInt() % Base + Base) % Base; 31// int index = (new Random().nextInt() % Base + Base) % Base;
30// karmaDataFile = "karma_data" + index + ".ttl"; 32// karmaDataFile = "karma_data" + index + ".ttl";
31// karmaRuleFile = "karma_rule" + index + ".dlog"; 33// karmaRuleFile = "karma_rule" + index + ".dlog";
32 karmaDataFile = Utility.TempDirectory + "karma_data.ttl"; 34 karmaDataFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_data.ttl").toString();
33 karmaRuleFile = Utility.TempDirectory + "karma_rule.dlog"; 35 karmaRuleFile = Paths.get(Utility.getGlobalTempDirAbsolutePath(), "karma_rule.dlog").toString();
34 36
35 reasoner = new MyKarma(); 37 reasoner = new MyKarma();
36 } 38 }
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
index 70d0cc9..61500f5 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java
@@ -1,8 +1,11 @@
1package uk.ac.ox.cs.pagoda.reasoner.light; 1package uk.ac.ox.cs.pagoda.reasoner.light;
2 2
3import java.io.File; 3import org.semanticweb.owlapi.model.OWLOntology;
4import java.util.Collection; 4import org.semanticweb.owlapi.model.OWLOntologyCreationException;
5 5import uk.ac.ox.cs.JRDFox.JRDFStoreException;
6import uk.ac.ox.cs.JRDFox.Prefixes;
7import uk.ac.ox.cs.JRDFox.store.DataStore;
8import uk.ac.ox.cs.JRDFox.store.DataStore.StoreType;
6import uk.ac.ox.cs.pagoda.MyPrefixes; 9import uk.ac.ox.cs.pagoda.MyPrefixes;
7import uk.ac.ox.cs.pagoda.query.AnswerTuples; 10import uk.ac.ox.cs.pagoda.query.AnswerTuples;
8import uk.ac.ox.cs.pagoda.reasoner.QueryEngine; 11import uk.ac.ox.cs.pagoda.reasoner.QueryEngine;
@@ -10,36 +13,51 @@ import uk.ac.ox.cs.pagoda.reasoner.QueryReasoner;
10import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter; 13import uk.ac.ox.cs.pagoda.tracking.AnswerTuplesWriter;
11import uk.ac.ox.cs.pagoda.util.Timer; 14import uk.ac.ox.cs.pagoda.util.Timer;
12import uk.ac.ox.cs.pagoda.util.Utility; 15import uk.ac.ox.cs.pagoda.util.Utility;
13import uk.ac.ox.cs.JRDFox.JRDFStoreException; 16
14import uk.ac.ox.cs.JRDFox.Prefixes; 17import java.io.File;
15import uk.ac.ox.cs.JRDFox.store.DataStore; 18import java.util.Collection;
16import uk.ac.ox.cs.JRDFox.store.DataStore.StoreType;
17 19
18public abstract class RDFoxQueryEngine implements QueryEngine { 20public abstract class RDFoxQueryEngine implements QueryEngine {
19 21
20 public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2; 22 public static final int matNoOfThreads = Runtime.getRuntime().availableProcessors() * 2;
21 23 protected String name;
22 protected String name;
23 protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes(); 24 protected Prefixes prefixes = MyPrefixes.PAGOdAPrefixes.getRDFoxPrefixes();
24 25
25 public RDFoxQueryEngine(String name) { 26 public RDFoxQueryEngine(String name) {
26 this.name = name; 27 this.name = name;
27 } 28 }
28 29
29 public abstract DataStore getDataStore(); 30 public static DataStore createDataStore() {
31 DataStore instance = null;
32 try {
33// instance = new DataStore("par-head-n");
34 instance = new DataStore(StoreType.NarrowParallelHead);
35 instance.setNumberOfThreads(matNoOfThreads);
36 instance.initialize();
37 } catch(JRDFStoreException e) {
38 e.printStackTrace();
39 }
40 return instance;
41 }
30 42
31 public abstract void dispose(); 43 public String getName() {
44 return name;
45 }
32 46
47 public abstract DataStore getDataStore();
48
49 public abstract void dispose();
50
33 public void importRDFData(String fileName, String importedFile) { 51 public void importRDFData(String fileName, String importedFile) {
34 if (importedFile == null || importedFile.isEmpty()) return ; 52 if(importedFile == null || importedFile.isEmpty()) return;
35 Timer t = new Timer(); 53 Timer t = new Timer();
36 DataStore store = getDataStore(); 54 DataStore store = getDataStore();
37 try { 55 try {
38 long oldTripleCount = store.getTriplesCount(), tripleCount; 56 long oldTripleCount = store.getTriplesCount(), tripleCount;
39 for (String file: importedFile.split(QueryReasoner.ImportDataFileSeparator)) { 57 for (String file: importedFile.split(QueryReasoner.ImportDataFileSeparator)) {
40 store.importTurtleFile(new File(file), prefixes); 58 store.importTurtleFile(new File(file), prefixes);
41 } 59 }
42 tripleCount = store.getTriplesCount(); 60 tripleCount = store.getTriplesCount();
43 Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); 61 Utility.logDebug(name + " store after importing " + fileName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)");
44 store.clearRulesAndMakeFactsExplicit(); 62 store.clearRulesAndMakeFactsExplicit();
45 } catch (JRDFStoreException e) { 63 } catch (JRDFStoreException e) {
@@ -47,17 +65,32 @@ public abstract class RDFoxQueryEngine implements QueryEngine {
47 } 65 }
48 Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds."); 66 Utility.logDebug(name + " store finished importing " + fileName + " in " + t.duration() + " seconds.");
49 } 67 }
50 68
69 public void importDataFromABoxOf(OWLOntology ontology) {
70 DataStore store = getDataStore();
71 try {
72 long prevTriplesCount = store.getTriplesCount();
73 store.importOntology(ontology.getOWLOntologyManager().createOntology(ontology.getABoxAxioms(true)));
74 long loadedTriples = store.getTriplesCount() - prevTriplesCount;
75 Utility.logInfo(name + ": loaded " + loadedTriples + " triples from " + ontology.getABoxAxioms(true)
76 .size() + " ABox axioms");
77 } catch(JRDFStoreException | OWLOntologyCreationException e) {
78 e.printStackTrace();
79 System.exit(1);
80 }
81
82 }
83
51 public void materialise(String programName, String programText) { 84 public void materialise(String programName, String programText) {
52 if (programText == null) return ; 85 if(programText == null) return;
53 Timer t = new Timer(); 86 Timer t = new Timer();
54 DataStore store = getDataStore(); 87 DataStore store = getDataStore();
55 try { 88 try {
56 long oldTripleCount = store.getTriplesCount(), tripleCount; 89 long oldTripleCount = store.getTriplesCount(), tripleCount;
57// store.addRules(new String[] {programText}); 90// store.addRules(new String[] {programText});
58 store.importRules(programText); 91 store.importRules(programText);
59 store.applyReasoning(); 92 store.applyReasoning();
60 tripleCount = store.getTriplesCount(); 93 tripleCount = store.getTriplesCount();
61 Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)"); 94 Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)");
62 store.clearRulesAndMakeFactsExplicit(); 95 store.clearRulesAndMakeFactsExplicit();
63 } catch (JRDFStoreException e) { 96 } catch (JRDFStoreException e) {
@@ -70,17 +103,17 @@ public abstract class RDFoxQueryEngine implements QueryEngine {
70 public void evaluate(Collection<String> queryTexts, String answerFile) { 103 public void evaluate(Collection<String> queryTexts, String answerFile) {
71 if (queryTexts == null) 104 if (queryTexts == null)
72 return ; 105 return ;
73 106
74 int queryID = 0; 107 int queryID = 0;
75 AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile); 108 AnswerTuplesWriter answerWriter = new AnswerTuplesWriter(answerFile);
76 AnswerTuples answerTuples; 109 AnswerTuples answerTuples;
77 Timer t = new Timer(); 110 Timer t = new Timer();
78 try { 111 try {
79 for (String query: queryTexts) { 112 for (String query: queryTexts) {
80 t.reset(); 113 t.reset();
81 answerTuples = null; 114 answerTuples = null;
82 try { 115 try {
83 answerTuples = evaluate(query); 116 answerTuples = evaluate(query);
84 Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration()); 117 Utility.logDebug("time to answer Query " + ++queryID + ": " + t.duration());
85 answerWriter.write(answerTuples.getAnswerVariables(), answerTuples); 118 answerWriter.write(answerTuples.getAnswerVariables(), answerTuples);
86 } finally { 119 } finally {
@@ -90,22 +123,9 @@ public abstract class RDFoxQueryEngine implements QueryEngine {
90 } finally { 123 } finally {
91 answerWriter.close(); 124 answerWriter.close();
92 } 125 }
93 126
94 Utility.logDebug("done computing query answers by RDFox."); 127 Utility.logDebug("done computing query answers by RDFox.");
95 128
96 }
97
98 public static DataStore createDataStore() {
99 DataStore instance = null;
100 try {
101// instance = new DataStore("par-head-n");
102 instance = new DataStore(StoreType.NarrowParallelHead);
103 instance.setNumberOfThreads(matNoOfThreads);
104 instance.initialize();
105 } catch (JRDFStoreException e) {
106 e.printStackTrace();
107 }
108 return instance;
109 } 129 }
110 130
111} 131}
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
index c2065dc..85f8ef9 100644
--- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
+++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxTripleManager.java
@@ -1,35 +1,33 @@
1package uk.ac.ox.cs.pagoda.reasoner.light; 1package uk.ac.ox.cs.pagoda.reasoner.light;
2 2
3import java.util.Collection; 3import net.sf.ehcache.Cache;
4import java.util.HashMap; 4import net.sf.ehcache.CacheManager;
5import java.util.LinkedList; 5import net.sf.ehcache.Element;
6import java.util.Map; 6import org.semanticweb.HermiT.model.*;
7import java.util.Queue;
8import org.semanticweb.HermiT.model.AnnotatedEquality;
9import org.semanticweb.HermiT.model.Atom;
10import org.semanticweb.HermiT.model.AtomicConcept;
11import org.semanticweb.HermiT.model.AtomicRole;
12import org.semanticweb.HermiT.model.Constant;
13import org.semanticweb.HermiT.model.DLPredicate;
14import org.semanticweb.HermiT.model.Equality;
15import org.semanticweb.HermiT.model.Individual;
16import org.semanticweb.HermiT.model.Inequality;
17import org.semanticweb.HermiT.model.Term;
18import org.semanticweb.HermiT.model.Variable;
19
20import uk.ac.ox.cs.pagoda.owl.OWLHelper;
21import uk.ac.ox.cs.pagoda.util.Namespace;
22import uk.ac.ox.cs.JRDFox.JRDFStoreException; 7import uk.ac.ox.cs.JRDFox.JRDFStoreException;
8import uk.ac.ox.cs.JRDFox.model.Datatype;
23import uk.ac.ox.cs.JRDFox.model.GroundTerm; 9import uk.ac.ox.cs.JRDFox.model.GroundTerm;
24import uk.ac.ox.cs.JRDFox.store.DataStore; 10import uk.ac.ox.cs.JRDFox.store.DataStore;
25import uk.ac.ox.cs.JRDFox.model.Datatype;
26import uk.ac.ox.cs.JRDFox.store.Dictionary;
27import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType; 11import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
12import uk.ac.ox.cs.JRDFox.store.Dictionary;
28import uk.ac.ox.cs.JRDFox.store.Resource; 13import uk.ac.ox.cs.JRDFox.store.Resource;
14import uk.ac.ox.cs.pagoda.owl.OWLHelper;
15import uk.ac.ox.cs.pagoda.util.Namespace;
16
17import java.util.Collection;
18import java.util.HashMap;
19import java.util.Map;
29 20
30public class RDFoxTripleManager { 21public class RDFoxTripleManager {
31 22
32 UpdateType m_incrementally; 23 private final Cache termsCache;
24 private static final int TERMS_CACHE_SIZE = 10000;
25 private static final int CACHE_TTL_DEFAULT = 0;
26 private static final int CACHE_TTI_DEFAULT = 0;
27 private static final boolean CACHE_ETERNAL = true;
28 private static final boolean CACHE_USE_DISK = false;
29
30 UpdateType m_incrementally;
33// boolean m_incrementally; 31// boolean m_incrementally;
34 32
35 DataStore m_store; 33 DataStore m_store;
@@ -38,7 +36,19 @@ public class RDFoxTripleManager {
38 36
39 public RDFoxTripleManager(DataStore store, boolean incrementally) { 37 public RDFoxTripleManager(DataStore store, boolean incrementally) {
40 m_store = store; 38 m_store = store;
41// m_incrementally = incrementally; 39// m_incrementally = incrementally;
40
41 CacheManager cacheManager = CacheManager.getInstance();
42 String cacheName = "RDFoxTripleManager_" + store.hashCode();
43 if(! cacheManager.cacheExists(cacheName)) {
44 termsCache = new Cache(cacheName,
45 TERMS_CACHE_SIZE, CACHE_USE_DISK, CACHE_ETERNAL,
46 CACHE_TTL_DEFAULT, CACHE_TTI_DEFAULT);
47 cacheManager.addCache(termsCache);
48 }
49 else
50 termsCache = cacheManager.getCache(cacheName);
51
42 if (incrementally) 52 if (incrementally)
43 m_incrementally = UpdateType.ScheduleForAddition; 53 m_incrementally = UpdateType.ScheduleForAddition;
44 else 54 else
@@ -178,28 +188,25 @@ public class RDFoxTripleManager {
178 return m_dict.resolveResources(lexicalForms, types)[0]; 188 return m_dict.resolveResources(lexicalForms, types)[0];
179 } 189 }
180 190
181 Map<Term, Integer> termCache = new HashMap<Term, Integer>(); 191// Map<Term, Integer> termCache = new HashMap<Term, Integer>();
182 Queue<Term> termList = new LinkedList<Term>(); 192// Queue<Term> termQueue = new LinkedList<Term>();
183 int sizeLimit = 10000;
184 193
185 private int getResourceID(Term arg, Map<Variable, Integer> assignment) { 194 private int getResourceID(Term arg, Map<Variable, Integer> assignment) {
186 while (termCache.size() > sizeLimit) 195 if (arg instanceof Variable) return assignment.get(arg);
187 termCache.remove(termList.poll()); 196 int id = -1;
188 197 if(termsCache.isKeyInCache(arg))
189 if (arg instanceof Variable) return assignment.get((Variable) arg); 198 return ((int) termsCache.get(arg).getObjectValue());
190 Integer id = null; 199
191 if ((id = termCache.get(arg)) != null)
192 return id;
193
194// if (arg instanceof Individual) { 200// if (arg instanceof Individual) {
195 try { 201 try {
196 if (arg instanceof Individual) 202 if (arg instanceof Individual)
197 termCache.put(arg, id = resolveResource(((Individual) arg).getIRI(), Datatype.IRI_REFERENCE.value())); 203 termsCache.put(new Element(arg, id = resolveResource(((Individual) arg).getIRI(), Datatype.IRI_REFERENCE.value())));
198 else if (arg instanceof Constant) 204 else if (arg instanceof Constant)
199 termCache.put(arg, id = resolveResource(((Constant) arg).getLexicalForm(), getDatatypeID(((Constant) arg).getDatatypeURI()))); 205 termsCache.put(new Element(arg, id = resolveResource(((Constant) arg).getLexicalForm(), getDatatypeID(((Constant) arg).getDatatypeURI()))));
200 206
201 } catch (JRDFStoreException e) { 207 } catch (JRDFStoreException e) {
202 e.printStackTrace(); 208 e.printStackTrace();
209 System.exit(1);
203 } 210 }
204// } 211// }
205 212
diff --git a/src/uk/ac/ox/cs/pagoda/rules/ApproxProgram.java b/src/uk/ac/ox/cs/pagoda/rules/ApproxProgram.java
index 3b9d6fc..acbf354 100644
--- a/src/uk/ac/ox/cs/pagoda/rules/ApproxProgram.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/ApproxProgram.java
@@ -1,27 +1,22 @@
1package uk.ac.ox.cs.pagoda.rules; 1package uk.ac.ox.cs.pagoda.rules;
2 2
3import java.util.Collection;
4import java.util.HashMap;
5import java.util.HashSet;
6import java.util.Iterator;
7import java.util.Map;
8
9import org.semanticweb.HermiT.model.DLClause; 3import org.semanticweb.HermiT.model.DLClause;
10import org.semanticweb.owlapi.model.OWLAxiom; 4import org.semanticweb.owlapi.model.OWLAxiom;
11import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; 5import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom;
12import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; 6import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom;
13
14import uk.ac.ox.cs.pagoda.owl.OWLHelper; 7import uk.ac.ox.cs.pagoda.owl.OWLHelper;
8import uk.ac.ox.cs.pagoda.rules.approximators.Approximator;
9
10import java.util.*;
15 11
16public abstract class ApproxProgram extends Program { 12public abstract class ApproxProgram extends Program {
17 13
14 protected Approximator m_approx = null;
18 /** 15 /**
19 * mapping from over-approximated DLClauses to DLClauses from the original ontology 16 * mapping from over-approximated DLClauses to DLClauses from the original ontology
20 */ 17 */
21 Map<DLClause, Object> correspondence = new HashMap<DLClause, Object>(); 18 Map<DLClause, Object> correspondence = new HashMap<DLClause, Object>();
22 19
23 protected Approximator m_approx = null;
24
25 protected ApproxProgram() { initApproximator(); } 20 protected ApproxProgram() { initApproximator(); }
26 21
27 protected abstract void initApproximator(); 22 protected abstract void initApproximator();
@@ -76,7 +71,7 @@ public abstract class ApproxProgram extends Program {
76 71
77 public OWLAxiom getEquivalentAxiom(DLClause clause) { 72 public OWLAxiom getEquivalentAxiom(DLClause clause) {
78 Object obj = correspondence.get(clause); 73 Object obj = correspondence.get(clause);
79 while (obj != null && obj instanceof DLClause && !obj.equals(clause) && correspondence.containsKey((DLClause) obj)) 74 while (obj != null && obj instanceof DLClause && !obj.equals(clause) && correspondence.containsKey(obj))
80 obj = correspondence.get(clause); 75 obj = correspondence.get(clause);
81 if (obj instanceof OWLAxiom) 76 if (obj instanceof OWLAxiom)
82 return (OWLAxiom) obj; 77 return (OWLAxiom) obj;
@@ -98,14 +93,14 @@ public abstract class ApproxProgram extends Program {
98 93
99class ClauseSet extends HashSet<DLClause> { 94class ClauseSet extends HashSet<DLClause> {
100 95
101 public ClauseSet(DLClause first, DLClause second) {
102 add(first);
103 add(second);
104 }
105
106 /** 96 /**
107 * 97 *
108 */ 98 */
109 private static final long serialVersionUID = 1L; 99 private static final long serialVersionUID = 1L;
110 100
101 public ClauseSet(DLClause first, DLClause second) {
102 add(first);
103 add(second);
104 }
105
111} \ No newline at end of file 106} \ No newline at end of file
diff --git a/src/uk/ac/ox/cs/pagoda/rules/Approximator.java b/src/uk/ac/ox/cs/pagoda/rules/Approximator.java
deleted file mode 100644
index b2edd56..0000000
--- a/src/uk/ac/ox/cs/pagoda/rules/Approximator.java
+++ /dev/null
@@ -1,62 +0,0 @@
1package uk.ac.ox.cs.pagoda.rules;
2
3import java.util.Collection;
4import java.util.LinkedList;
5
6import org.semanticweb.HermiT.model.AtLeast;
7import org.semanticweb.HermiT.model.Atom;
8import org.semanticweb.HermiT.model.DLClause;
9import org.semanticweb.HermiT.model.DLPredicate;
10
11public interface Approximator {
12
13 public Collection<DLClause> convert(DLClause clause, DLClause originalClause);
14
15}
16
17class IgnoreExist implements Approximator {
18
19 @Override
20 public Collection<DLClause> convert(DLClause clause, DLClause originalClause) {
21 Collection<DLClause> ret = new LinkedList<DLClause>();
22 DLPredicate p;
23 for (Atom headAtom: clause.getHeadAtoms()) {
24 p = headAtom.getDLPredicate();
25 if (p instanceof AtLeast) return ret;
26 }
27
28 ret.add(clause);
29 return ret;
30 }
31
32}
33
34class IgnoreBoth implements Approximator {
35
36 @Override
37 public Collection<DLClause> convert(DLClause clause, DLClause originalClause) {
38 Collection<DLClause> ret = new LinkedList<DLClause>();
39
40 if (clause.getHeadLength() > 1) return ret;
41
42 if (clause.getHeadLength() > 0) {
43 DLPredicate predicate = clause.getHeadAtom(0).getDLPredicate();
44
45 if (predicate instanceof AtLeast) return ret;
46 }
47
48 ret.add(clause);
49 return ret;
50 }
51}
52
53class IgnoreDisj implements Approximator {
54
55 @Override
56 public Collection<DLClause> convert(DLClause clause, DLClause originalClause) {
57 Collection<DLClause> ret = new LinkedList<DLClause>();
58 if (clause.getHeadLength() > 1) return ret;
59 ret.add(clause);
60 return ret;
61 }
62}
diff --git a/src/uk/ac/ox/cs/pagoda/rules/DisjunctiveProgram.java b/src/uk/ac/ox/cs/pagoda/rules/DisjunctiveProgram.java
index 6ebe666..d50c2d4 100644
--- a/src/uk/ac/ox/cs/pagoda/rules/DisjunctiveProgram.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/DisjunctiveProgram.java
@@ -1,10 +1,12 @@
1package uk.ac.ox.cs.pagoda.rules; 1package uk.ac.ox.cs.pagoda.rules;
2 2
3import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
4
3public class DisjunctiveProgram extends UpperProgram { 5public class DisjunctiveProgram extends UpperProgram {
4 6
5 @Override 7 @Override
6 protected void initApproximator() { 8 protected void initApproximator() {
7 m_approx = new OverApproxExist(); 9 m_approx = new OverApproxExist();
8 } 10 }
9 11
10// @Override 12// @Override
diff --git a/src/uk/ac/ox/cs/pagoda/rules/ExistConstantApproximator.java b/src/uk/ac/ox/cs/pagoda/rules/ExistConstantApproximator.java
new file mode 100644
index 0000000..a7afa2e
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/rules/ExistConstantApproximator.java
@@ -0,0 +1,26 @@
1package uk.ac.ox.cs.pagoda.rules;
2
3import org.semanticweb.HermiT.model.DLClause;
4import org.semanticweb.HermiT.model.Individual;
5import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
6import uk.ac.ox.cs.pagoda.rules.approximators.TupleDependentApproximator;
7import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
8
9import java.util.Collection;
10
11/**
12 * A wrapper for <tt>OverApproxExist</tt>.
13 * */
14public class ExistConstantApproximator implements TupleDependentApproximator {
15
16 private final OverApproxExist overApproxExist;
17
18 public ExistConstantApproximator() {
19 overApproxExist = new OverApproxExist();
20 }
21
22 @Override
23 public Collection<DLClause> convert(DLClause clause, DLClause originalClause, Collection<Tuple<Individual>> violationTuples) {
24 return overApproxExist.convert(clause, originalClause);
25 }
26}
diff --git a/src/uk/ac/ox/cs/pagoda/rules/ExistentialProgram.java b/src/uk/ac/ox/cs/pagoda/rules/ExistentialProgram.java
index 64d018f..e825917 100644
--- a/src/uk/ac/ox/cs/pagoda/rules/ExistentialProgram.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/ExistentialProgram.java
@@ -1,5 +1,7 @@
1package uk.ac.ox.cs.pagoda.rules; 1package uk.ac.ox.cs.pagoda.rules;
2 2
3import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxDisj;
4
3public class ExistentialProgram extends UpperProgram { 5public class ExistentialProgram extends UpperProgram {
4 6
5// @Override 7// @Override
@@ -12,7 +14,7 @@ public class ExistentialProgram extends UpperProgram {
12 14
13 @Override 15 @Override
14 protected void initApproximator() { 16 protected void initApproximator() {
15 m_approx = new OverApproxDisj(); 17 m_approx = new OverApproxDisj();
16 } 18 }
17 19
18} 20}
diff --git a/src/uk/ac/ox/cs/pagoda/rules/ExistentialToDisjunctive.java b/src/uk/ac/ox/cs/pagoda/rules/ExistentialToDisjunctive.java
index ebe0b7d..2098f73 100644
--- a/src/uk/ac/ox/cs/pagoda/rules/ExistentialToDisjunctive.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/ExistentialToDisjunctive.java
@@ -1,19 +1,17 @@
1package uk.ac.ox.cs.pagoda.rules; 1package uk.ac.ox.cs.pagoda.rules;
2 2
3import org.semanticweb.HermiT.model.*;
4import org.semanticweb.owlapi.model.OWLObjectProperty;
5import org.semanticweb.owlapi.model.OWLOntology;
6import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
7import uk.ac.ox.cs.pagoda.rules.approximators.Approximator;
8import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
9
3import java.util.Collection; 10import java.util.Collection;
4import java.util.HashSet; 11import java.util.HashSet;
5import java.util.LinkedList; 12import java.util.LinkedList;
6import java.util.Set; 13import java.util.Set;
7 14
8import org.semanticweb.HermiT.model.AtLeastConcept;
9import org.semanticweb.HermiT.model.Atom;
10import org.semanticweb.HermiT.model.AtomicRole;
11import org.semanticweb.HermiT.model.DLClause;
12import org.semanticweb.HermiT.model.DLPredicate;
13import org.semanticweb.owlapi.model.OWLObjectProperty;
14import org.semanticweb.owlapi.model.OWLOntology;
15import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
16
17public class ExistentialToDisjunctive extends UpperProgram { 15public class ExistentialToDisjunctive extends UpperProgram {
18 16
19 Set<String> inverseFuncProperties = new HashSet<String>(); 17 Set<String> inverseFuncProperties = new HashSet<String>();
diff --git a/src/uk/ac/ox/cs/pagoda/rules/LowerDatalogProgram.java b/src/uk/ac/ox/cs/pagoda/rules/LowerDatalogProgram.java
index 6e17f02..199d167 100644
--- a/src/uk/ac/ox/cs/pagoda/rules/LowerDatalogProgram.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/LowerDatalogProgram.java
@@ -1,32 +1,24 @@
1package uk.ac.ox.cs.pagoda.rules; 1package uk.ac.ox.cs.pagoda.rules;
2 2
3import java.util.Collection;
4import java.util.Iterator;
5import java.util.LinkedList;
6import java.util.Set;
7
8import org.semanticweb.HermiT.Reasoner; 3import org.semanticweb.HermiT.Reasoner;
9import org.semanticweb.HermiT.model.Atom; 4import org.semanticweb.HermiT.model.*;
10import org.semanticweb.HermiT.model.AtomicConcept; 5import org.semanticweb.owlapi.model.*;
11import org.semanticweb.HermiT.model.AtomicRole;
12import org.semanticweb.HermiT.model.DLClause;
13import org.semanticweb.HermiT.model.Variable;
14import org.semanticweb.owlapi.model.OWLClass;
15import org.semanticweb.owlapi.model.OWLObjectInverseOf;
16import org.semanticweb.owlapi.model.OWLObjectProperty;
17import org.semanticweb.owlapi.model.OWLObjectPropertyExpression;
18import org.semanticweb.owlapi.model.OWLOntology;
19import org.semanticweb.owlapi.reasoner.Node; 6import org.semanticweb.owlapi.reasoner.Node;
20
21import uk.ac.ox.cs.pagoda.constraints.BottomStrategy; 7import uk.ac.ox.cs.pagoda.constraints.BottomStrategy;
22import uk.ac.ox.cs.pagoda.constraints.NullaryBottom; 8import uk.ac.ox.cs.pagoda.constraints.NullaryBottom;
23import uk.ac.ox.cs.pagoda.constraints.UnaryBottom; 9import uk.ac.ox.cs.pagoda.constraints.UnaryBottom;
24import uk.ac.ox.cs.pagoda.constraints.UpperUnaryBottom; 10import uk.ac.ox.cs.pagoda.constraints.UpperUnaryBottom;
25import uk.ac.ox.cs.pagoda.multistage.Normalisation; 11import uk.ac.ox.cs.pagoda.multistage.Normalisation;
26import uk.ac.ox.cs.pagoda.multistage.RestrictedApplication; 12import uk.ac.ox.cs.pagoda.multistage.RestrictedApplication;
13import uk.ac.ox.cs.pagoda.rules.approximators.Approximator;
27import uk.ac.ox.cs.pagoda.util.Timer; 14import uk.ac.ox.cs.pagoda.util.Timer;
28import uk.ac.ox.cs.pagoda.util.Utility; 15import uk.ac.ox.cs.pagoda.util.Utility;
29 16
17import java.util.Collection;
18import java.util.Iterator;
19import java.util.LinkedList;
20import java.util.Set;
21
30public class LowerDatalogProgram extends ApproxProgram implements IncrementalProgram { 22public class LowerDatalogProgram extends ApproxProgram implements IncrementalProgram {
31 23
32 boolean m_toClassify; 24 boolean m_toClassify;
@@ -68,7 +60,7 @@ public class LowerDatalogProgram extends ApproxProgram implements IncrementalPro
68 norm.process(); 60 norm.process();
69 for (DLClause nClause: norm.getNormlisedClauses()) { 61 for (DLClause nClause: norm.getNormlisedClauses()) {
70 if (nClause.getHeadLength() != 1) 62 if (nClause.getHeadLength() != 1)
71 for (DLClause newClause: RestrictedApplication.addAddtionalDatalogRules(nClause, tBottom, norm)) { 63 for (DLClause newClause: RestrictedApplication.addAdditionalDatalogRules(nClause, tBottom, norm)) {
72// System.out.println(newClause); 64// System.out.println(newClause);
73 if (newClause.getHeadAtom(0).getDLPredicate() instanceof AtomicConcept || newClause.getHeadAtom(0).getDLPredicate() instanceof AtomicRole) { 65 if (newClause.getHeadAtom(0).getDLPredicate() instanceof AtomicConcept || newClause.getHeadAtom(0).getDLPredicate() instanceof AtomicRole) {
74// System.out.println(newClause); 66// System.out.println(newClause);
@@ -115,7 +107,26 @@ public class LowerDatalogProgram extends ApproxProgram implements IncrementalPro
115 107
116 @Override 108 @Override
117 protected void initApproximator() { 109 protected void initApproximator() {
118 m_approx = new IgnoreBoth(); 110 m_approx = new IgnoreBoth();
111 }
112
113 private class IgnoreBoth implements Approximator {
114
115 @Override
116 public Collection<DLClause> convert(DLClause clause, DLClause originalClause) {
117 Collection<DLClause> ret = new LinkedList<DLClause>();
118
119 if (clause.getHeadLength() > 1) return ret;
120
121 if (clause.getHeadLength() > 0) {
122 DLPredicate predicate = clause.getHeadAtom(0).getDLPredicate();
123
124 if (predicate instanceof AtLeast) return ret;
125 }
126
127 ret.add(clause);
128 return ret;
129 }
119 } 130 }
120 131
121} 132}
@@ -125,15 +136,13 @@ class ClassifyThread extends Thread {
125 IncrementalProgram m_program; 136 IncrementalProgram m_program;
126 Collection<DLClause> clauses = new LinkedList<DLClause>(); 137 Collection<DLClause> clauses = new LinkedList<DLClause>();
127 138
128 Variable X = Variable.create("X"), Y = Variable.create("Y"); 139 Variable X = Variable.create("X"), Y = Variable.create("Y");
129 140 Reasoner hermitReasoner;
141 OWLOntology ontology;
130 ClassifyThread(IncrementalProgram program) { 142 ClassifyThread(IncrementalProgram program) {
131 m_program = program; 143 m_program = program;
132 } 144 }
133 145
134 Reasoner hermitReasoner;
135 OWLOntology ontology;
136
137 @Override 146 @Override
138 public void run() { 147 public void run() {
139 ontology = m_program.getOntology(); 148 ontology = m_program.getOntology();
@@ -224,5 +233,4 @@ class ClassifyThread extends Thread {
224 private Atom getAtom(OWLClass c) { 233 private Atom getAtom(OWLClass c) {
225 return Atom.create(AtomicConcept.create(c.toStringID()), X); 234 return Atom.create(AtomicConcept.create(c.toStringID()), X);
226 } 235 }
227
228} \ No newline at end of file 236} \ No newline at end of file
diff --git a/src/uk/ac/ox/cs/pagoda/rules/Program.java b/src/uk/ac/ox/cs/pagoda/rules/Program.java
index 83cd21a..2e5302b 100644
--- a/src/uk/ac/ox/cs/pagoda/rules/Program.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/Program.java
@@ -1,39 +1,10 @@
1package uk.ac.ox.cs.pagoda.rules; 1package uk.ac.ox.cs.pagoda.rules;
2 2
3import java.io.BufferedWriter;
4import java.io.File;
5import java.io.FileNotFoundException;
6import java.io.FileOutputStream;
7import java.io.IOException;
8import java.io.OutputStreamWriter;
9import java.util.Collection;
10import java.util.HashMap;
11import java.util.HashSet;
12import java.util.Iterator;
13import java.util.LinkedList;
14import java.util.List;
15import java.util.Map;
16import java.util.Set;
17
18import org.semanticweb.HermiT.Configuration; 3import org.semanticweb.HermiT.Configuration;
19import org.semanticweb.HermiT.model.AnnotatedEquality; 4import org.semanticweb.HermiT.model.*;
20import org.semanticweb.HermiT.model.Atom;
21import org.semanticweb.HermiT.model.AtomicConcept;
22import org.semanticweb.HermiT.model.AtomicDataRange;
23import org.semanticweb.HermiT.model.AtomicNegationDataRange;
24import org.semanticweb.HermiT.model.AtomicRole;
25import org.semanticweb.HermiT.model.ConstantEnumeration;
26import org.semanticweb.HermiT.model.DLClause;
27import org.semanticweb.HermiT.model.DLOntology;
28import org.semanticweb.HermiT.model.DLPredicate;
29import org.semanticweb.HermiT.model.Equality;
30import org.semanticweb.HermiT.model.Inequality;
31import org.semanticweb.HermiT.model.InverseRole;
32import org.semanticweb.HermiT.model.Term;
33import org.semanticweb.HermiT.model.Variable;
34import org.semanticweb.HermiT.structural.OWLClausification; 5import org.semanticweb.HermiT.structural.OWLClausification;
35import org.semanticweb.owlapi.model.*; 6import org.semanticweb.owlapi.model.*;
36 7import org.semanticweb.simpleETL.SimpleETL;
37import uk.ac.ox.cs.pagoda.MyPrefixes; 8import uk.ac.ox.cs.pagoda.MyPrefixes;
38import uk.ac.ox.cs.pagoda.approx.KnowledgeBase; 9import uk.ac.ox.cs.pagoda.approx.KnowledgeBase;
39import uk.ac.ox.cs.pagoda.approx.RLPlusOntology; 10import uk.ac.ox.cs.pagoda.approx.RLPlusOntology;
@@ -44,7 +15,8 @@ import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
44import uk.ac.ox.cs.pagoda.owl.OWLHelper; 15import uk.ac.ox.cs.pagoda.owl.OWLHelper;
45import uk.ac.ox.cs.pagoda.util.Utility; 16import uk.ac.ox.cs.pagoda.util.Utility;
46 17
47import org.semanticweb.simpleETL.SimpleETL; 18import java.io.*;
19import java.util.*;
48 20
49public abstract class Program implements KnowledgeBase { 21public abstract class Program implements KnowledgeBase {
50 22
@@ -377,7 +349,7 @@ public abstract class Program implements KnowledgeBase {
377 } 349 }
378 350
379 public final String getDirectory() { 351 public final String getDirectory() {
380 return Utility.TempDirectory; 352 return Utility.getGlobalTempDirAbsolutePath();
381 } 353 }
382 354
383 public void deleteABoxTurtleFile() { 355 public void deleteABoxTurtleFile() {
diff --git a/src/uk/ac/ox/cs/pagoda/rules/UpperDatalogProgram.java b/src/uk/ac/ox/cs/pagoda/rules/UpperDatalogProgram.java
index a4cd790..611e183 100644
--- a/src/uk/ac/ox/cs/pagoda/rules/UpperDatalogProgram.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/UpperDatalogProgram.java
@@ -1,12 +1,13 @@
1package uk.ac.ox.cs.pagoda.rules; 1package uk.ac.ox.cs.pagoda.rules;
2 2
3import org.semanticweb.HermiT.model.DLClause;
4import org.semanticweb.HermiT.model.DLPredicate;
5import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxBoth;
6
3import java.util.Collection; 7import java.util.Collection;
4import java.util.HashMap; 8import java.util.HashMap;
5import java.util.Map; 9import java.util.Map;
6 10
7import org.semanticweb.HermiT.model.DLClause;
8import org.semanticweb.HermiT.model.DLPredicate;
9
10 11
11public class UpperDatalogProgram extends UpperProgram { 12public class UpperDatalogProgram extends UpperProgram {
12 13
@@ -22,7 +23,7 @@ public class UpperDatalogProgram extends UpperProgram {
22 23
23 @Override 24 @Override
24 protected void initApproximator() { 25 protected void initApproximator() {
25 m_approx = new OverApproxBoth(); 26 m_approx = new OverApproxBoth();
26 } 27 }
27 28
28 public int getBottomNumber() { 29 public int getBottomNumber() {
diff --git a/src/uk/ac/ox/cs/pagoda/rules/approximators/Approximator.java b/src/uk/ac/ox/cs/pagoda/rules/approximators/Approximator.java
new file mode 100644
index 0000000..f910c64
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/rules/approximators/Approximator.java
@@ -0,0 +1,42 @@
1package uk.ac.ox.cs.pagoda.rules.approximators;
2
3import org.semanticweb.HermiT.model.DLClause;
4
5import java.util.Collection;
6
7public interface Approximator {
8
9 Collection<DLClause> convert(DLClause clause, DLClause originalClause);
10
11}
12
13// TODO remove
14//class IgnoreExist implements Approximator {
15//
16// @Override
17// public Collection<DLClause> convert(DLClause clause, DLClause originalClause) {
18// Collection<DLClause> ret = new LinkedList<DLClause>();
19// DLPredicate p;
20// for (Atom headAtom: clause.getHeadAtoms()) {
21// p = headAtom.getDLPredicate();
22// if (p instanceof AtLeast) return ret;
23// }
24//
25// ret.add(clause);
26// return ret;
27// }
28//
29//}
30//
31//
32//
33//class IgnoreDisj implements Approximator {
34//
35// @Override
36// public Collection<DLClause> convert(DLClause clause, DLClause originalClause) {
37// Collection<DLClause> ret = new LinkedList<DLClause>();
38// if (clause.getHeadLength() > 1) return ret;
39// ret.add(clause);
40// return ret;
41// }
42//}
diff --git a/src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java b/src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java
new file mode 100644
index 0000000..3f1ed7e
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/rules/approximators/LimitedSkolemisationApproximator.java
@@ -0,0 +1,150 @@
1package uk.ac.ox.cs.pagoda.rules.approximators;
2
3import org.semanticweb.HermiT.model.*;
4import uk.ac.ox.cs.pagoda.multistage.MultiStageUpperProgram;
5import uk.ac.ox.cs.pagoda.rules.ExistConstantApproximator;
6import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
7import uk.ac.ox.cs.pagoda.util.tuples.TupleBuilder;
8
9import java.util.ArrayList;
10import java.util.Arrays;
11import java.util.Collection;
12
13/**
14 * Approximates existential rules through a limited form of Skolemisation.
15 * <p>
16 * Given a rule and a ground substitution,
17 * it Skolemises the rule
18 * if all the terms in the substitution have depth less than a given depth,
19 * otherwise it approximates using an alternative <tt>TupleDependentApproximator</tt>.
20 * */
21public class LimitedSkolemisationApproximator implements TupleDependentApproximator {
22
23 private static final Atom[] EMPTY_BODY = new Atom[0];
24 private static final Variable X = Variable.create("X");
25 private final int maxTermDepth;
26 private final TupleDependentApproximator alternativeApproximator;
27 private final SkolemTermsManager skolemTermsManager;
28
29 public LimitedSkolemisationApproximator(int maxTermDepth) {
30 this(maxTermDepth, new ExistConstantApproximator());
31 }
32
33 public LimitedSkolemisationApproximator(int maxTermDepth, TupleDependentApproximator alternativeApproximator) {
34 this.maxTermDepth = maxTermDepth;
35 this.alternativeApproximator = alternativeApproximator;
36 this.skolemTermsManager = SkolemTermsManager.getInstance();
37 }
38
39 @Override
40 public Collection<DLClause> convert(DLClause clause,
41 DLClause originalClause,
42 Collection<Tuple<Individual>> violationTuples) {
43 switch (clause.getHeadLength()) {
44 case 1:
45 return overApprox(clause, originalClause, violationTuples);
46 case 0:
47 return Arrays.asList(clause);
48 default:
49 throw new IllegalArgumentException(
50 "Expected clause with head length < 1, but it is " + clause.getHeadLength());
51 }
52
53
54 }
55
56 private Collection<DLClause> overApprox(DLClause clause, DLClause originalClause, Collection<Tuple<Individual>> violationTuples) {
57 ArrayList<DLClause> result = new ArrayList<>();
58
59 for (Tuple<Individual> violationTuple : violationTuples)
60 if (getMaxDepth(violationTuple) > maxTermDepth)
61 result.addAll(alternativeApproximator.convert(clause, originalClause, null));
62 else
63 result.addAll(getGroundSkolemisation(clause, originalClause, violationTuple));
64
65 return result;
66 }
67
68 private Collection<DLClause> getGroundSkolemisation(DLClause clause,
69 DLClause originalClause,
70 Tuple<Individual> violationTuple) {
71
72 String[] commonVars = MultiStageUpperProgram.getCommonVars(clause);
73
74 // TODO check: strong assumption, the first tuples are the common ones
75 TupleBuilder<Individual> commonIndividualsBuilder = new TupleBuilder<>();
76 for (int i = 0; i < commonVars.length; i++)
77 commonIndividualsBuilder.append(violationTuple.get(i));
78 Tuple<Individual> commonIndividuals = commonIndividualsBuilder.build();
79
80 Atom headAtom = clause.getHeadAtom(0);
81
82// Atom[] bodyAtoms = clause.getBodyAtoms();
83 int offset = OverApproxExist.indexOfExistential(headAtom, originalClause);
84
85 // BEGIN: copy and paste
86 ArrayList<DLClause> ret = new ArrayList<>();
87 DLPredicate predicate = headAtom.getDLPredicate();
88 if (predicate instanceof AtLeastConcept) {
89 AtLeastConcept atLeastConcept = (AtLeastConcept) predicate;
90 LiteralConcept concept = atLeastConcept.getToConcept();
91 Role role = atLeastConcept.getOnRole();
92 AtomicConcept atomicConcept;
93
94 if (concept instanceof AtomicNegationConcept) {
95 // TODO CHECK: is this already in MultiStageUpperProgram?
96 Atom atom1 = Atom.create(atomicConcept = ((AtomicNegationConcept) concept).getNegatedAtomicConcept(), X);
97 Atom atom2 = Atom.create(atomicConcept = OverApproxExist.getNegationConcept(atomicConcept), X);
98 ret.add(DLClause.create(new Atom[0], new Atom[] {atom1, atom2}));
99 }
100 else {
101 atomicConcept = (AtomicConcept) concept;
102 if (atomicConcept.equals(AtomicConcept.THING))
103 atomicConcept = null;
104 }
105
106 int card = atLeastConcept.getNumber();
107 Individual[] individuals = new Individual[card];
108 SkolemTermsManager termsManager = SkolemTermsManager.getInstance();
109 for (int i = 0; i < card; ++i)
110 individuals[i] = termsManager.getFreshIndividual(originalClause,
111 offset + i,
112 commonIndividuals);
113
114 for (int i = 0; i < card; ++i) {
115 if (atomicConcept != null)
116 ret.add(DLClause.create(new Atom[] {Atom.create(atomicConcept, individuals[i])}, EMPTY_BODY));
117
118 Atom atom = role instanceof AtomicRole ?
119 Atom.create((AtomicRole) role, commonIndividuals.get(0), individuals[i]) :
120 Atom.create(((InverseRole) role).getInverseOf(), individuals[i], commonIndividuals.get(0));
121
122 ret.add(DLClause.create(new Atom[] {atom}, EMPTY_BODY));
123 }
124
125 for (int i = 0; i < card; ++i)
126 for (int j = i + 1; j < card; ++j)
127 // TODO to be checked ... different as
128 ret.add(DLClause.create(new Atom[] {Atom.create(Inequality.INSTANCE, individuals[i], individuals[j])}, EMPTY_BODY));
129
130 }
131 else if (predicate instanceof AtLeastDataRange) {
132 // TODO to be implemented ...
133 }
134 else
135 ret.add(DLClause.create(new Atom[] {headAtom}, EMPTY_BODY));
136
137 return ret;
138
139 // END: copy and paste
140 }
141
142
143 public int getMaxDepth(Tuple<Individual> violationTuple) {
144 int maxDepth = 0;
145 for (Individual individual : violationTuple)
146 maxDepth = Integer.max(maxDepth, skolemTermsManager.getDepthOf(individual));
147
148 return maxDepth;
149 }
150}
diff --git a/src/uk/ac/ox/cs/pagoda/rules/OverApproxBoth.java b/src/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxBoth.java
index 3cc2aba..ae2a2cf 100644
--- a/src/uk/ac/ox/cs/pagoda/rules/OverApproxBoth.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxBoth.java
@@ -1,11 +1,11 @@
1package uk.ac.ox.cs.pagoda.rules; 1package uk.ac.ox.cs.pagoda.rules.approximators;
2
3import java.util.Collection;
4import java.util.LinkedList;
5 2
6import org.semanticweb.HermiT.model.AtLeastDataRange; 3import org.semanticweb.HermiT.model.AtLeastDataRange;
7import org.semanticweb.HermiT.model.DLClause; 4import org.semanticweb.HermiT.model.DLClause;
8 5
6import java.util.Collection;
7import java.util.LinkedList;
8
9public class OverApproxBoth implements Approximator { 9public class OverApproxBoth implements Approximator {
10 10
11 Approximator approxDist = new OverApproxDisj(), approxExist = new OverApproxExist(); 11 Approximator approxDist = new OverApproxDisj(), approxExist = new OverApproxExist();
diff --git a/src/uk/ac/ox/cs/pagoda/rules/OverApproxDisj.java b/src/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxDisj.java
index 5edb08e..05d9442 100644
--- a/src/uk/ac/ox/cs/pagoda/rules/OverApproxDisj.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxDisj.java
@@ -1,22 +1,19 @@
1package uk.ac.ox.cs.pagoda.rules; 1package uk.ac.ox.cs.pagoda.rules.approximators;
2
3import java.util.Collection;
4import java.util.HashMap;
5import java.util.HashSet;
6import java.util.LinkedList;
7import java.util.Map;
8import java.util.Set;
9
10import org.semanticweb.HermiT.model.Atom;
11import org.semanticweb.HermiT.model.DLClause;
12import org.semanticweb.HermiT.model.DLPredicate;
13import org.semanticweb.HermiT.model.Term;
14import org.semanticweb.HermiT.model.Variable;
15 2
3import org.semanticweb.HermiT.model.*;
16import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; 4import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
17 5
6import java.util.*;
7
18public class OverApproxDisj implements Approximator { 8public class OverApproxDisj implements Approximator {
19 9
10 /**
11 * Splits a disjunctive rule into a bunch of rules.
12 * <p>
13 * It returns a collection containing a rule for each atom in the head of the input rule.
14 * Each rule has the same body of the input rule,
15 * and the relative head atom as head.
16 * */
20 @Override 17 @Override
21 public Collection<DLClause> convert(DLClause clause, DLClause originalClause) { 18 public Collection<DLClause> convert(DLClause clause, DLClause originalClause) {
22 LinkedList<DLClause> distincts = new LinkedList<DLClause>(); 19 LinkedList<DLClause> distincts = new LinkedList<DLClause>();
@@ -91,7 +88,7 @@ public class OverApproxDisj implements Approximator {
91 arguments[i] = rename(atom.getArgument(i), unifier); 88 arguments[i] = rename(atom.getArgument(i), unifier);
92 return Atom.create(atom.getDLPredicate(), arguments); 89 return Atom.create(atom.getDLPredicate(), arguments);
93 } 90 }
94 91
95 public static Term rename(Term argument, Map<Variable, Term> unifier) { 92 public static Term rename(Term argument, Map<Variable, Term> unifier) {
96 Term newArg; 93 Term newArg;
97 while ((newArg = unifier.get(argument)) != null) 94 while ((newArg = unifier.get(argument)) != null)
diff --git a/src/uk/ac/ox/cs/pagoda/rules/OverApproxExist.java b/src/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxExist.java
index 7e66411..028568c 100644
--- a/src/uk/ac/ox/cs/pagoda/rules/OverApproxExist.java
+++ b/src/uk/ac/ox/cs/pagoda/rules/approximators/OverApproxExist.java
@@ -1,131 +1,74 @@
1package uk.ac.ox.cs.pagoda.rules; 1package uk.ac.ox.cs.pagoda.rules.approximators;
2
3import org.semanticweb.HermiT.model.*;
4import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
2 5
3import java.util.Collection; 6import java.util.Collection;
4import java.util.HashMap;
5import java.util.Iterator; 7import java.util.Iterator;
6import java.util.LinkedList; 8import java.util.LinkedList;
7import java.util.Map;
8
9import org.semanticweb.HermiT.model.AtLeast;
10import org.semanticweb.HermiT.model.AtLeastConcept;
11import org.semanticweb.HermiT.model.AtLeastDataRange;
12import org.semanticweb.HermiT.model.Atom;
13import org.semanticweb.HermiT.model.AtomicConcept;
14import org.semanticweb.HermiT.model.AtomicNegationConcept;
15import org.semanticweb.HermiT.model.AtomicRole;
16import org.semanticweb.HermiT.model.DLClause;
17import org.semanticweb.HermiT.model.DLPredicate;
18import org.semanticweb.HermiT.model.Individual;
19import org.semanticweb.HermiT.model.Inequality;
20import org.semanticweb.HermiT.model.InverseRole;
21import org.semanticweb.HermiT.model.LiteralConcept;
22import org.semanticweb.HermiT.model.Role;
23import org.semanticweb.HermiT.model.Term;
24import org.semanticweb.HermiT.model.Variable;
25import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
26import uk.ac.ox.cs.pagoda.util.Namespace;
27 9
28public class OverApproxExist implements Approximator { 10public class OverApproxExist implements Approximator {
29 11
30 @Override 12 public static final String negativeSuffix = "_neg";
31 public Collection<DLClause> convert(DLClause clause, DLClause originalClause) { 13 private static final Variable X = Variable.create("X");
32 Collection<DLClause> ret;
33 switch (clause.getHeadLength()) {
34 case 1:
35 return overApprox(clause.getHeadAtom(0), clause.getBodyAtoms(), originalClause);
36 case 0:
37 ret = new LinkedList<DLClause>();
38 ret.add(clause);
39 return ret;
40 default:
41 ret = new LinkedList<DLClause>();
42 for (Iterator<DLClause> iter = new DisjunctiveHeads(clause, originalClause); iter.hasNext(); )
43 ret.add(iter.next());
44 return ret;
45 }
46 }
47
48 private static int noOfExistential(DLClause originalClause) {
49 int no = 0;
50 for (Atom atom: originalClause.getHeadAtoms())
51 if (atom.getDLPredicate() instanceof AtLeast)
52 no += ((AtLeast) atom.getDLPredicate()).getNumber();
53 return no;
54 }
55 14
56 private static int indexOfExistential(Atom headAtom, DLClause originalClause) { 15 static int indexOfExistential(Atom headAtom, DLClause originalClause) {
57 if (!(headAtom.getDLPredicate() instanceof AtLeast)) return -1; 16 if (!(headAtom.getDLPredicate() instanceof AtLeast)) return -1;
58 AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate(); 17 AtLeastConcept alc = (AtLeastConcept) headAtom.getDLPredicate();
59 if (alc.getToConcept() instanceof AtomicConcept) { 18 if (alc.getToConcept() instanceof AtomicConcept) {
60 AtomicConcept ac = (AtomicConcept) alc.getToConcept(); 19 AtomicConcept ac = (AtomicConcept) alc.getToConcept();
61 if (ac.getIRI().endsWith(negativeSuffix)) { 20 if (ac.getIRI().endsWith(negativeSuffix)) {
62 alc = AtLeastConcept.create(alc.getNumber(), alc.getOnRole(), AtomicNegationConcept.create(getNegationConcept(ac))); 21 alc = AtLeastConcept.create(alc.getNumber(), alc.getOnRole(), AtomicNegationConcept.create(getNegationConcept(ac)));
63 headAtom = Atom.create(alc, headAtom.getArgument(0)); 22 headAtom = Atom.create(alc, headAtom.getArgument(0));
64 } 23 }
65 } 24 }
66 25
67 int index = 0; 26 int index = 0;
68 for (Atom atom: originalClause.getHeadAtoms()) { 27 for (Atom atom: originalClause.getHeadAtoms()) {
69 if (atom.equals(headAtom)) 28 if (atom.equals(headAtom))
70 return index; 29 return index;
71 if (atom.getDLPredicate() instanceof AtLeast) 30 if (atom.getDLPredicate() instanceof AtLeast)
72 index += ((AtLeast) atom.getDLPredicate()).getNumber(); 31 index += ((AtLeast) atom.getDLPredicate()).getNumber();
73 } 32 }
74 return -1; 33 return -1;
75 } 34 }
76 35
77 private static final Variable X = Variable.create("X");
78 public static final String negativeSuffix = "_neg";
79
80 public static AtomicConcept getNegationConcept(DLPredicate p) { 36 public static AtomicConcept getNegationConcept(DLPredicate p) {
81 if (p.equals(AtomicConcept.THING)) return AtomicConcept.NOTHING; 37 if (p.equals(AtomicConcept.THING)) return AtomicConcept.NOTHING;
82 if (p.equals(AtomicConcept.NOTHING)) return AtomicConcept.THING; 38 if (p.equals(AtomicConcept.NOTHING)) return AtomicConcept.THING;
83 39
84 if (p instanceof AtomicConcept) { 40 if (p instanceof AtomicConcept) {
85 String iri = ((AtomicConcept) p).getIRI(); 41 String iri = ((AtomicConcept) p).getIRI();
86 if (iri.endsWith(negativeSuffix)) 42 if (iri.endsWith(negativeSuffix))
87 iri = iri.substring(0, iri.length() - 4); 43 iri = iri.substring(0, iri.length() - 4);
88 else 44 else
89 iri += negativeSuffix; 45 iri += negativeSuffix;
90 46
91 return AtomicConcept.create(iri); 47 return AtomicConcept.create(iri);
92 } 48 }
93 if (p instanceof AtLeastConcept) { 49 if (p instanceof AtLeastConcept) {
94 // FIXME !!! here 50 // FIXME !!! here
95 return null; 51 return null;
96 } 52 }
97 return null; 53 return null;
98 }
99
100 public static final String skolemisedIndividualPrefix = Namespace.PAGODA_ANONY + "individual";
101
102 private static int individualCounter = 0;
103 private static Map<DLClause, Integer> individualNumber = new HashMap<DLClause, Integer>();
104
105 public static int getNumberOfSkolemisedIndividual() {
106 return individualCounter;
107 } 54 }
108 55
109 public static Individual getNewIndividual(DLClause originalClause, int offset) { 56 @Override
110 Individual ret; 57 public Collection<DLClause> convert(DLClause clause, DLClause originalClause) {
111 if (individualNumber.containsKey(originalClause)) { 58 Collection<DLClause> ret;
112 ret = Individual.create(skolemisedIndividualPrefix + (individualNumber.get(originalClause) + offset)); 59 switch (clause.getHeadLength()) {
113 } 60 case 1:
114 else { 61 return overApprox(clause.getHeadAtom(0), clause.getBodyAtoms(), originalClause);
115 individualNumber.put(originalClause, individualCounter); 62 case 0:
116 ret = Individual.create(skolemisedIndividualPrefix + (individualCounter + offset)); 63 ret = new LinkedList<DLClause>();
117 individualCounter += noOfExistential(originalClause); 64 ret.add(clause);
118 } 65 return ret;
119 return ret; 66 default:
120 } 67 ret = new LinkedList<DLClause>();
121 68 for (Iterator<DLClause> iter = new DisjunctiveHeads(clause, originalClause); iter.hasNext(); )
122 public static int indexOfSkolemisedIndividual(Atom atom) { 69 ret.add(iter.next());
123 Term t; 70 return ret;
124 for (int index = 0; index < atom.getArity(); ++index) {
125 t = atom.getArgument(index);
126 if (t instanceof Individual && ((Individual) t).getIRI().contains(skolemisedIndividualPrefix)) return index;
127 } 71 }
128 return -1;
129 } 72 }
130 73
131 public Collection<DLClause> overApprox(Atom headAtom, Atom[] bodyAtoms, DLClause originalClause) { 74 public Collection<DLClause> overApprox(Atom headAtom, Atom[] bodyAtoms, DLClause originalClause) {
@@ -142,6 +85,7 @@ public class OverApproxExist implements Approximator {
142 AtomicConcept atomicConcept = null; 85 AtomicConcept atomicConcept = null;
143 86
144 if (concept instanceof AtomicNegationConcept) { 87 if (concept instanceof AtomicNegationConcept) {
88 // TODO CHECK: is this already in MultiStageUpperProgram?
145 Atom atom1 = Atom.create(atomicConcept = ((AtomicNegationConcept) concept).getNegatedAtomicConcept(), X); 89 Atom atom1 = Atom.create(atomicConcept = ((AtomicNegationConcept) concept).getNegatedAtomicConcept(), X);
146 Atom atom2 = Atom.create(atomicConcept = getNegationConcept(atomicConcept), X); 90 Atom atom2 = Atom.create(atomicConcept = getNegationConcept(atomicConcept), X);
147 ret.add(DLClause.create(new Atom[0], new Atom[] {atom1, atom2})); 91 ret.add(DLClause.create(new Atom[0], new Atom[] {atom1, atom2}));
@@ -154,7 +98,8 @@ public class OverApproxExist implements Approximator {
154 98
155 int card = atLeastConcept.getNumber(); 99 int card = atLeastConcept.getNumber();
156 Individual[] individuals = new Individual[card]; 100 Individual[] individuals = new Individual[card];
157 for (int i = 0; i < card; ++i) individuals[i] = getNewIndividual(originalClause, offset + i); 101 SkolemTermsManager termsManager = SkolemTermsManager.getInstance();
102 for (int i = 0; i < card; ++i) individuals[i] = termsManager.getFreshIndividual(originalClause, offset + i);
158 103
159 for (int i = 0; i < card; ++i) { 104 for (int i = 0; i < card; ++i) {
160 if (atomicConcept != null) 105 if (atomicConcept != null)
diff --git a/src/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java b/src/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java
new file mode 100644
index 0000000..368c014
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/rules/approximators/SkolemTermsManager.java
@@ -0,0 +1,118 @@
1package uk.ac.ox.cs.pagoda.rules.approximators;
2
3import org.semanticweb.HermiT.model.*;
4import uk.ac.ox.cs.pagoda.util.Namespace;
5import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
6
7import java.util.HashMap;
8import java.util.Map;
9
10/**
11 * If you need a Skolem term (i.e. a fresh individual), ask this class.
12 */
13public class SkolemTermsManager {
14
15 public static final String SKOLEMISED_INDIVIDUAL_PREFIX = Namespace.PAGODA_ANONY + "individual";
16
17 private static SkolemTermsManager skolemTermsManager;
18
19 private int termsCounter = 0;
20 private Map<DLClause, Integer> mapClauseToId = new HashMap<>();
21 private Map<Individual, Integer> mapTermToDepth = new HashMap<>();
22 private int dependenciesCounter = 0;
23
24 // TODO replace with the hashcode; in case of a collision you would merely get a different upper bound model.
25 // TODO you could use a cache.
26 private Map<Tuple<Individual>, Integer> mapDependencyToId = new HashMap<>();
27
28 private SkolemTermsManager() {
29 }
30
31 public static int indexOfSkolemisedIndividual(Atom atom) {
32 Term t;
33 for(int index = 0; index < atom.getArity(); ++index) {
34 t = atom.getArgument(index);
35 if(t instanceof Individual && ((Individual) t).getIRI().contains(SKOLEMISED_INDIVIDUAL_PREFIX))
36 return index;
37 }
38 return -1;
39 }
40
41 public static SkolemTermsManager getInstance() {
42 if(skolemTermsManager == null) skolemTermsManager = new SkolemTermsManager();
43 return skolemTermsManager;
44 }
45
46 /**
47 * Get a fresh Individual, unique for the clause, the offset and the dependency.
48 * */
49 public Individual getFreshIndividual(DLClause originalClause, int offset, Tuple<Individual> dependency) {
50 if(!mapClauseToId.containsKey(originalClause)) {
51 mapClauseToId.put(originalClause, termsCounter);
52 termsCounter += noOfExistential(originalClause);
53 }
54 if (!mapDependencyToId.containsKey(dependency)) {
55 mapDependencyToId.put(dependency, dependenciesCounter++);
56 }
57
58 String termId = mapClauseToId.get(originalClause) + offset + "_" + mapDependencyToId(dependency);
59 Individual newIndividual = Individual.create(SKOLEMISED_INDIVIDUAL_PREFIX + termId);
60
61 int depth = 0;
62 for (Individual individual : dependency)
63 depth = Integer.max(depth, mapIndividualToDepth(individual));
64 mapTermToDepth.put(newIndividual, depth);
65
66 return newIndividual;
67 }
68
69 /**
70 * Get a fresh Individual, unique for the clause and the offset.
71 * */
72 public Individual getFreshIndividual(DLClause originalClause, int offset) {
73 if(!mapClauseToId.containsKey(originalClause)) {
74 mapClauseToId.put(originalClause, termsCounter);
75 termsCounter += noOfExistential(originalClause);
76 }
77
78 String termId = "" + mapClauseToId.get(originalClause) + offset;
79 Individual newIndividual = Individual.create(SKOLEMISED_INDIVIDUAL_PREFIX + termId);
80 mapTermToDepth.put(newIndividual, 0);
81
82 return newIndividual;
83 }
84
85 /**
86 * Get the depth of a term.
87 * <p>
88 * The term must have been generated by this manager.
89 * */
90 public int getDepthOf(Individual individual) {
91 return mapIndividualToDepth(individual);
92 }
93
94 /**
95 * Get the number of individuals generated by this manager.
96 * */
97 public int getNumberOfSkolemisedIndividual() {
98 return mapTermToDepth.keySet().size();
99 }
100
101 private int mapDependencyToId(Tuple<Individual> dependency) {
102 if (mapDependencyToId.containsKey(dependency)) return mapDependencyToId.get(dependency);
103 else return mapDependencyToId.put(dependency, dependenciesCounter++);
104 }
105
106 private int mapIndividualToDepth(Individual dependency) {
107 if(mapTermToDepth.containsKey(dependency)) return mapTermToDepth.get(dependency);
108 else return 0;
109 }
110
111 private int noOfExistential(DLClause originalClause) {
112 int no = 0;
113 for (Atom atom : originalClause.getHeadAtoms())
114 if (atom.getDLPredicate() instanceof AtLeast)
115 no += ((AtLeast) atom.getDLPredicate()).getNumber();
116 return no;
117 }
118}
diff --git a/src/uk/ac/ox/cs/pagoda/rules/approximators/TupleDependentApproximator.java b/src/uk/ac/ox/cs/pagoda/rules/approximators/TupleDependentApproximator.java
new file mode 100644
index 0000000..c99a1ad
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/rules/approximators/TupleDependentApproximator.java
@@ -0,0 +1,19 @@
1package uk.ac.ox.cs.pagoda.rules.approximators;
2
3import org.semanticweb.HermiT.model.DLClause;
4import org.semanticweb.HermiT.model.Individual;
5import uk.ac.ox.cs.pagoda.util.tuples.Tuple;
6
7import java.util.Collection;
8
9/**
10 * It can approximate clauses according to a collection of tuples of individuals.
11 * <p>
12 * In particular it can be used to approximate rules given some body instantiations.
13 */
14public interface TupleDependentApproximator {
15
16 Collection<DLClause> convert(DLClause clause,
17 DLClause originalClause,
18 Collection<Tuple<Individual>> individualsTuples);
19}
diff --git a/src/uk/ac/ox/cs/pagoda/summary/HermitSummaryFilter.java b/src/uk/ac/ox/cs/pagoda/summary/HermitSummaryFilter.java
index a0ecce8..90a2ed4 100644
--- a/src/uk/ac/ox/cs/pagoda/summary/HermitSummaryFilter.java
+++ b/src/uk/ac/ox/cs/pagoda/summary/HermitSummaryFilter.java
@@ -1,16 +1,6 @@
1package uk.ac.ox.cs.pagoda.summary; 1package uk.ac.ox.cs.pagoda.summary;
2 2
3import java.util.HashSet; 3import org.semanticweb.owlapi.model.*;
4import java.util.Set;
5
6import org.semanticweb.owlapi.model.IRI;
7import org.semanticweb.owlapi.model.OWLAxiom;
8import org.semanticweb.owlapi.model.OWLClass;
9import org.semanticweb.owlapi.model.OWLDataFactory;
10import org.semanticweb.owlapi.model.OWLOntology;
11import org.semanticweb.owlapi.model.OWLOntologyCreationException;
12import org.semanticweb.owlapi.model.OWLOntologyManager;
13
14import uk.ac.ox.cs.JRDFox.model.Individual; 4import uk.ac.ox.cs.JRDFox.model.Individual;
15import uk.ac.ox.cs.pagoda.endomorph.Endomorph; 5import uk.ac.ox.cs.pagoda.endomorph.Endomorph;
16import uk.ac.ox.cs.pagoda.owl.OWLHelper; 6import uk.ac.ox.cs.pagoda.owl.OWLHelper;
@@ -25,166 +15,166 @@ import uk.ac.ox.cs.pagoda.tracking.TrackingRuleEncoder;
25import uk.ac.ox.cs.pagoda.util.Timer; 15import uk.ac.ox.cs.pagoda.util.Timer;
26import uk.ac.ox.cs.pagoda.util.Utility; 16import uk.ac.ox.cs.pagoda.util.Utility;
27 17
18import java.util.HashSet;
19import java.util.Set;
20
28public class HermitSummaryFilter implements Checker { 21public class HermitSummaryFilter implements Checker {
29 22
30 QueryRecord m_record; 23 public static final String QueryAnswerTermPrefix = TrackingRuleEncoder.QueryPredicate + "_term";
24 QueryRecord m_record;
31 Summary summary = null; 25 Summary summary = null;
32 HermitChecker summarisedHermiT = null; 26 HermitChecker summarisedHermiT = null;
33 boolean summarisedConsistency; 27 boolean summarisedConsistency;
34
35 Endomorph endomorphismChecker = null; 28 Endomorph endomorphismChecker = null;
36 29
37 public HermitSummaryFilter(QueryRecord record, boolean toCallHermiT) { 30 public HermitSummaryFilter(QueryRecord record, boolean toCallHermiT) {
38 m_record = record; 31 m_record = record;
39 HermitChecker hermitChecker = new HermitChecker(record.getRelevantOntology(), record, toCallHermiT); 32 HermitChecker hermitChecker = new HermitChecker(record.getRelevantOntology(), record, toCallHermiT);
40 endomorphismChecker = new Endomorph(record, hermitChecker); 33 endomorphismChecker = new Endomorph(record, hermitChecker);
41 hermitChecker.setDependencyGraph(endomorphismChecker.getDependencyGraph()); 34 hermitChecker.setDependencyGraph(endomorphismChecker.getDependencyGraph());
35 }
36
37 public static OWLOntology addOntologyWithQueryPreciate(OWLOntology ontology, QueryRecord record, AnswerTuples answers) {
38 OWLOntology newOntology = null;
39 OWLOntologyManager manager = ontology.getOWLOntologyManager();
40 OWLDataFactory factory = manager.getOWLDataFactory();
41 try {
42 newOntology = manager.createOntology();
43 manager.addAxioms(newOntology, ontology.getAxioms());
44
45 OWLClass[] queryClass = new OWLClass[answers.getArity()];
46 int arity = answers.getArity();
47 for(int i = 0; i < arity; ++i)
48 queryClass[i] = factory.getOWLClass(IRI.create(QueryAnswerTermPrefix + i));
49 AnswerTuple answer;
50 for(; answers.isValid(); answers.moveNext()) {
51 answer = answers.getTuple();
52 for(int i = 0; i < arity; ++i)
53 if(answer.getGroundTerm(i) instanceof Individual) {
54 String iri = ((Individual) answer.getGroundTerm(i)).getIRI();
55 if(!record.isPredicate(answer, i)) {
56 manager.addAxiom(newOntology,
57 factory.getOWLClassAssertionAxiom(
58 queryClass[i],
59 factory.getOWLNamedIndividual(IRI.create(iri))));
60 }
61 }
62 }
63 answers.reset();
64 } catch(OWLOntologyCreationException e) {
65 e.printStackTrace();
66 }
67
68 return newOntology;
69 }
70
71 public static void printRelatedABoxAxioms(OWLOntology onto, String str) {
72 if(!str.startsWith("<")) str = OWLHelper.addAngles(str);
73
74 System.out.println("Axioms in " + onto.getOntologyID().getOntologyIRI() + " related to " + str);
75
76 for(OWLAxiom axiom : onto.getABoxAxioms(true))
77 if(axiom.toString().contains(str))
78 System.out.println(axiom);
79
80 System.out.println("-----------------------------");
81 }
82
83 public static void printRelatedTBoxAxioms(OWLOntology onto, String str) {
84
85 System.out.println("Axioms in " + onto.getOntologyID().getOntologyIRI() + " related to " + str);
86
87 for(OWLAxiom axiom : onto.getTBoxAxioms(true))
88 if(axiom.toString().contains(str))
89 System.out.println(axiom);
90
91 for(OWLAxiom axiom : onto.getRBoxAxioms(true))
92 if(axiom.toString().contains(str))
93 System.out.println(axiom);
94
95 System.out.println("-----------------------------");
42 } 96 }
43 97
44 @Override 98 @Override
45 public boolean isConsistent() { 99 public boolean isConsistent() {
46 if (summary == null) 100 if (summary == null)
47 summary = new Summary(endomorphismChecker.getOntology(), endomorphismChecker.getGraph()); 101 summary = new Summary(endomorphismChecker.getOntology(), endomorphismChecker.getGraph());
48 102
49 if (summarisedHermiT == null) 103 if(summarisedHermiT == null)
50 initialiseSummarisedReasoner(); 104 initialiseSummarisedReasoner();
51 105
52 if (summarisedConsistency) return true; 106 if(summarisedConsistency) return true;
53 return endomorphismChecker.isConsistent(); 107 return endomorphismChecker.isConsistent();
54 } 108 }
55 109
56 private void initialiseSummarisedReasoner() { 110 private void initialiseSummarisedReasoner() {
57 Timer t = new Timer(); 111 Timer t = new Timer();
58 summarisedHermiT = new HermitChecker(summary.getSummary(), summary.getSummary(m_record)); 112 summarisedHermiT = new HermitChecker(summary.getSummary(), summary.getSummary(m_record));
59// summary.save("summarised_query" + m_record.getQueryID() + ".owl"); 113// summary.save("summarised_query" + m_record.getQueryID() + ".owl");
60 if (summarisedConsistency = summarisedHermiT.isConsistent()) 114 if(summarisedConsistency = summarisedHermiT.isConsistent())
61 Utility.logDebug("The summary of ABox is consistent with the TBox."); 115 Utility.logDebug("The summary of ABox is consistent with the TBox.");
62 else 116 else
63 Utility.logDebug("The summary of ABox is NOT consistent with the TBox."); 117 Utility.logDebug("The summary of ABox is NOT consistent with the TBox.");
64 m_record.addProcessingTime(Step.Summarisation, t.duration()); 118 m_record.addProcessingTime(Step.SUMMARISATION, t.duration());
65 } 119 }
66 120
67 @Override 121 @Override
68 public int check(AnswerTuples answers) { 122 public int check(AnswerTuples answers) {
69 Timer t = new Timer(); 123 Timer t = new Timer();
70 OWLOntology newOntology = addOntologyWithQueryPreciate(endomorphismChecker.getOntology(), m_record, answers); 124 OWLOntology newOntology = addOntologyWithQueryPreciate(endomorphismChecker.getOntology(), m_record, answers);
71 summary = new Summary(newOntology); 125 summary = new Summary(newOntology);
72 initialiseSummarisedReasoner(); 126 initialiseSummarisedReasoner();
73 127
74 if (summarisedConsistency) { 128 if (summarisedConsistency) {
75 Set<AnswerTuple> passed = new HashSet<AnswerTuple>(), succ = new HashSet<AnswerTuple>(); 129 Set<AnswerTuple> passed = new HashSet<AnswerTuple>(), succ = new HashSet<AnswerTuple>();
76 Set<AnswerTuple> falsified = new HashSet<AnswerTuple>(), fail = new HashSet<AnswerTuple>(); 130 Set<AnswerTuple> falsified = new HashSet<AnswerTuple>(), fail = new HashSet<AnswerTuple>();
77 131
78 int counter = 0; 132 int counter = 0;
79 AnswerTuple representative; 133 AnswerTuple representative;
80 for (AnswerTuple answer; answers.isValid(); answers.moveNext()) { 134 for (AnswerTuple answer; answers.isValid(); answers.moveNext()) {
81 ++counter; 135 ++counter;
82 answer = answers.getTuple(); 136 answer = answers.getTuple();
83 representative = summary.getSummary(answer); 137 representative = summary.getSummary(answer);
84 if (fail.contains(representative)) 138 if(fail.contains(representative))
85 falsified.add(answer); 139 falsified.add(answer);
86 else if (succ.contains(representative)) 140 else if(succ.contains(representative))
87 passed.add(answer); 141 passed.add(answer);
88 else 142 else if(summarisedHermiT.check(representative)) {
89 if (summarisedHermiT.check(representative)) {
90 succ.add(representative); 143 succ.add(representative);
91 passed.add(answer); 144 passed.add(answer);
92 } 145 }
93 else { 146 else {
94 fail.add(representative); 147 fail.add(representative);
95 falsified.add(answer); 148 falsified.add(answer);
96 } 149 }
97 } 150 }
98 answers.dispose(); 151 answers.dispose();
99 152
100 Utility.logDebug("@TIME to filter out non-answers by summarisation: " + t.duration()); 153 Utility.logDebug("@TIME to filter out non-answers by summarisation: " + t.duration());
101 154
102 m_record.removeUpperBoundAnswers(falsified); 155 m_record.removeUpperBoundAnswers(falsified);
103 156
104 if (m_record.processed()) { 157 if(m_record.isProcessed()) {
105 m_record.setDifficulty(Step.Summarisation); 158 m_record.setDifficulty(Step.SUMMARISATION);
106 m_record.addProcessingTime(Step.Summarisation, t.duration()); 159 m_record.addProcessingTime(Step.SUMMARISATION, t.duration());
107 return 0; 160 return 0;
108 } 161 }
109 162
110 Utility.logDebug("The number of answers to be checked with HermiT: " + passed.size() + "/" + counter); 163 Utility.logDebug("The number of answers to be checked with HermiT: " + passed.size() + "/" + counter);
111 164
112 m_record.setDifficulty(Step.FullReasoning); 165 m_record.setDifficulty(Step.FULL_REASONING);
113 m_record.addProcessingTime(Step.Summarisation, t.duration()); 166 m_record.addProcessingTime(Step.SUMMARISATION, t.duration());
114 167
115 return endomorphismChecker.check(new AnswerTuplesImp(m_record.getAnswerVariables(), passed)); 168 return endomorphismChecker.check(new AnswerTuplesImp(m_record.getAnswerVariables(), passed));
116 } 169 }
117 else { 170 else {
118 m_record.addProcessingTime(Step.Summarisation, t.duration()); 171 m_record.addProcessingTime(Step.SUMMARISATION, t.duration());
119// m_record.saveRelevantOntology("fragment.owl"); 172// m_record.saveRelevantOntology("fragment.owl");
120 m_record.setDifficulty(Step.FullReasoning); 173 m_record.setDifficulty(Step.FULL_REASONING);
121 return endomorphismChecker.check(answers); 174 return endomorphismChecker.check(answers);
122 } 175 }
123 } 176 }
124 177
125 public static final String QueryAnswerTermPrefix = TrackingRuleEncoder.QueryPredicate + "_term";
126
127 public static OWLOntology addOntologyWithQueryPreciate(OWLOntology ontology, QueryRecord record, AnswerTuples answers) {
128 OWLOntology newOntology = null;
129 OWLOntologyManager manager = ontology.getOWLOntologyManager();
130 OWLDataFactory factory = manager.getOWLDataFactory();
131 try {
132 newOntology = manager.createOntology();
133 manager.addAxioms(newOntology, ontology.getAxioms());
134
135 OWLClass[] queryClass = new OWLClass[answers.getArity()];
136 int arity = answers.getArity();
137 for (int i = 0; i < arity; ++i)
138 queryClass[i] = factory.getOWLClass(IRI.create(QueryAnswerTermPrefix + i));
139 AnswerTuple answer;
140 for (; answers.isValid(); answers.moveNext()) {
141 answer = answers.getTuple();
142 for (int i = 0; i < arity; ++i)
143 if (answer.getGroundTerm(i) instanceof Individual) {
144 String iri = ((Individual) answer.getGroundTerm(i)).getIRI();
145 if (!record.isPredicate(answer, i)) {
146 manager.addAxiom(newOntology,
147 factory.getOWLClassAssertionAxiom(
148 queryClass[i],
149 factory.getOWLNamedIndividual(IRI.create(iri))));
150 }
151 }
152 }
153 answers.reset();
154 } catch (OWLOntologyCreationException e) {
155 e.printStackTrace();
156 }
157
158 return newOntology;
159 }
160
161 public static void printRelatedABoxAxioms(OWLOntology onto, String str) {
162 if (!str.startsWith("<")) str = OWLHelper.addAngles(str);
163
164 System.out.println("Axioms in " + onto.getOntologyID().getOntologyIRI() + " related to " + str);
165
166 for (OWLAxiom axiom: onto.getABoxAxioms(true))
167 if (axiom.toString().contains(str))
168 System.out.println(axiom);
169
170 System.out.println("-----------------------------");
171 }
172
173 public static void printRelatedTBoxAxioms(OWLOntology onto, String str) {
174
175 System.out.println("Axioms in " + onto.getOntologyID().getOntologyIRI() + " related to " + str);
176
177 for (OWLAxiom axiom: onto.getTBoxAxioms(true))
178 if (axiom.toString().contains(str))
179 System.out.println(axiom);
180
181 for (OWLAxiom axiom: onto.getRBoxAxioms(true))
182 if (axiom.toString().contains(str))
183 System.out.println(axiom);
184
185 System.out.println("-----------------------------");
186 }
187
188 @Override 178 @Override
189 public boolean check(AnswerTuple answer) { 179 public boolean check(AnswerTuple answer) {
190 AnswerTuple representative = summary.getSummary(answer); 180 AnswerTuple representative = summary.getSummary(answer);
diff --git a/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java b/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java
index 2973109..d2d041f 100644
--- a/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java
+++ b/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java
@@ -1,24 +1,13 @@
1package uk.ac.ox.cs.pagoda.tracking; 1package uk.ac.ox.cs.pagoda.tracking;
2 2
3import java.util.HashSet;
4import java.util.Iterator;
5import java.util.LinkedList;
6import java.util.Set;
7
8import org.semanticweb.HermiT.model.DLClause; 3import org.semanticweb.HermiT.model.DLClause;
9import org.semanticweb.owlapi.model.IRI; 4import org.semanticweb.owlapi.model.*;
10import org.semanticweb.owlapi.model.OWLAxiom; 5import uk.ac.ox.cs.JRDFox.JRDFStoreException;
11import org.semanticweb.owlapi.model.OWLClass; 6import uk.ac.ox.cs.JRDFox.model.Datatype;
12import org.semanticweb.owlapi.model.OWLDataFactory; 7import uk.ac.ox.cs.JRDFox.store.DataStore;
13import org.semanticweb.owlapi.model.OWLDataProperty; 8import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
14import org.semanticweb.owlapi.model.OWLLiteral; 9import uk.ac.ox.cs.JRDFox.store.Resource;
15import org.semanticweb.owlapi.model.OWLIndividual; 10import uk.ac.ox.cs.JRDFox.store.TupleIterator;
16import org.semanticweb.owlapi.model.OWLObject;
17import org.semanticweb.owlapi.model.OWLObjectProperty;
18import org.semanticweb.owlapi.model.OWLOntology;
19import org.semanticweb.owlapi.model.OWLOntologyCreationException;
20import org.semanticweb.owlapi.model.OWLOntologyManager;
21
22import uk.ac.ox.cs.pagoda.MyPrefixes; 11import uk.ac.ox.cs.pagoda.MyPrefixes;
23import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; 12import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
24import uk.ac.ox.cs.pagoda.owl.OWLHelper; 13import uk.ac.ox.cs.pagoda.owl.OWLHelper;
@@ -30,12 +19,11 @@ import uk.ac.ox.cs.pagoda.util.Namespace;
30import uk.ac.ox.cs.pagoda.util.Timer; 19import uk.ac.ox.cs.pagoda.util.Timer;
31import uk.ac.ox.cs.pagoda.util.UFS; 20import uk.ac.ox.cs.pagoda.util.UFS;
32import uk.ac.ox.cs.pagoda.util.Utility; 21import uk.ac.ox.cs.pagoda.util.Utility;
33import uk.ac.ox.cs.JRDFox.JRDFStoreException; 22
34import uk.ac.ox.cs.JRDFox.model.Datatype; 23import java.util.HashSet;
35import uk.ac.ox.cs.JRDFox.store.DataStore; 24import java.util.Iterator;
36import uk.ac.ox.cs.JRDFox.store.Resource; 25import java.util.LinkedList;
37import uk.ac.ox.cs.JRDFox.store.TupleIterator; 26import java.util.Set;
38import uk.ac.ox.cs.JRDFox.store.DataStore.UpdateType;
39 27
40public class QueryTracker { 28public class QueryTracker {
41 29
@@ -85,7 +73,7 @@ public class QueryTracker {
85 store.applyReasoning(incrementally); 73 store.applyReasoning(incrementally);
86 tripleCount = store.getTriplesCount(); 74 tripleCount = store.getTriplesCount();
87 75
88 Utility.logDebug("tracking store after materialising tracking program: " 76 Utility.logInfo("tracking store after materialising tracking program: "
89 + tripleCount 77 + tripleCount
90 + " (" 78 + " ("
91 + (tripleCount - oldTripleCount) 79 + (tripleCount - oldTripleCount)
diff --git a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj.java b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj.java
index cee829f..b169053 100644
--- a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj.java
+++ b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj.java
@@ -1,30 +1,14 @@
1package uk.ac.ox.cs.pagoda.tracking; 1package uk.ac.ox.cs.pagoda.tracking;
2 2
3import java.util.Collection; 3import org.semanticweb.HermiT.model.*;
4import java.util.Collections;
5import java.util.HashMap;
6import java.util.HashSet;
7import java.util.Iterator;
8import java.util.LinkedList;
9import java.util.Map;
10
11import org.semanticweb.HermiT.model.AnnotatedEquality;
12import org.semanticweb.HermiT.model.AtLeastConcept;
13import org.semanticweb.HermiT.model.Atom;
14import org.semanticweb.HermiT.model.AtomicConcept;
15import org.semanticweb.HermiT.model.AtomicRole;
16import org.semanticweb.HermiT.model.DLClause;
17import org.semanticweb.HermiT.model.DLPredicate;
18import org.semanticweb.HermiT.model.Equality;
19import org.semanticweb.HermiT.model.Individual;
20import org.semanticweb.HermiT.model.Inequality;
21
22import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; 4import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
23import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; 5import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
24import uk.ac.ox.cs.pagoda.rules.OverApproxExist;
25import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; 6import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram;
7import uk.ac.ox.cs.pagoda.rules.approximators.SkolemTermsManager;
26import uk.ac.ox.cs.pagoda.util.Namespace; 8import uk.ac.ox.cs.pagoda.util.Namespace;
27 9
10import java.util.*;
11
28public abstract class TrackingRuleEncoderDisj extends TrackingRuleEncoderWithGap { 12public abstract class TrackingRuleEncoderDisj extends TrackingRuleEncoderWithGap {
29 13
30 public TrackingRuleEncoderDisj(UpperDatalogProgram program, BasicQueryEngine store) { 14 public TrackingRuleEncoderDisj(UpperDatalogProgram program, BasicQueryEngine store) {
@@ -38,8 +22,9 @@ public abstract class TrackingRuleEncoderDisj extends TrackingRuleEncoderWithGap
38 */ 22 */
39 protected void processDisjunctiveRules() { 23 protected void processDisjunctiveRules() {
40 Map<Atom, Collection<DLClause>> auxiliaryAtoms = new HashMap<Atom, Collection<DLClause>>(); 24 Map<Atom, Collection<DLClause>> auxiliaryAtoms = new HashMap<Atom, Collection<DLClause>>();
41 Map<Individual, Collection<DLClause>> skolemisedAtoms = new HashMap<Individual, Collection<DLClause>>(); 25 Map<Individual, Collection<DLClause>> skolemisedAtoms = new HashMap<Individual, Collection<DLClause>>();
42 26 SkolemTermsManager termsManager = SkolemTermsManager.getInstance();
27
43 for (Map.Entry<DLClause, Collection<DLClause>> entry: disjunctiveRules.entrySet()) { 28 for (Map.Entry<DLClause, Collection<DLClause>> entry: disjunctiveRules.entrySet()) {
44 DLClause original = entry.getKey(); 29 DLClause original = entry.getKey();
45 Collection<DLClause> overClauses = entry.getValue(); 30 Collection<DLClause> overClauses = entry.getValue();
@@ -49,7 +34,7 @@ public abstract class TrackingRuleEncoderDisj extends TrackingRuleEncoderWithGap
49 DLClause subClause = iter.next(); 34 DLClause subClause = iter.next();
50 if (DLClauseHelper.hasSubsetBodyAtoms(subClause, original)) { 35 if (DLClauseHelper.hasSubsetBodyAtoms(subClause, original)) {
51 Atom headAtom = subClause.getHeadAtom(0); 36 Atom headAtom = subClause.getHeadAtom(0);
52 if ((index = OverApproxExist.indexOfSkolemisedIndividual(headAtom)) != -1) { 37 if ((index = SkolemTermsManager.indexOfSkolemisedIndividual(headAtom)) != -1) {
53 Individual i = (Individual) headAtom.getArgument(index); 38 Individual i = (Individual) headAtom.getArgument(index);
54 Collection<DLClause> clauses = skolemisedAtoms.get(i); 39 Collection<DLClause> clauses = skolemisedAtoms.get(i);
55 if (clauses == null) { 40 if (clauses == null) {
@@ -71,7 +56,7 @@ public abstract class TrackingRuleEncoderDisj extends TrackingRuleEncoderWithGap
71 Collection<DLClause> clauses = new HashSet<DLClause>(); 56 Collection<DLClause> clauses = new HashSet<DLClause>();
72 Individual[] individuals = new Individual[alc.getNumber()]; 57 Individual[] individuals = new Individual[alc.getNumber()];
73 for (int i = 0; i < alc.getNumber(); ++i) { 58 for (int i = 0; i < alc.getNumber(); ++i) {
74 individuals[i] = OverApproxExist.getNewIndividual(original, i); 59 individuals[i] = termsManager.getFreshIndividual(original, i);
75 clauses.addAll(skolemisedAtoms.get(individuals[i])); 60 clauses.addAll(skolemisedAtoms.get(individuals[i]));
76 } 61 }
77 auxiliaryAtoms.put(getAuxiliaryAtom(original, headAtom, individuals), clauses); 62 auxiliaryAtoms.put(getAuxiliaryAtom(original, headAtom, individuals), clauses);
diff --git a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj1.java b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj1.java
index e72ed96..d6b5e8b 100644
--- a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj1.java
+++ b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj1.java
@@ -1,26 +1,14 @@
1package uk.ac.ox.cs.pagoda.tracking; 1package uk.ac.ox.cs.pagoda.tracking;
2 2
3import java.util.LinkedList; 3import org.semanticweb.HermiT.model.*;
4
5import org.semanticweb.HermiT.model.AtLeastConcept;
6import org.semanticweb.HermiT.model.Atom;
7import org.semanticweb.HermiT.model.AtomicConcept;
8import org.semanticweb.HermiT.model.AtomicNegationConcept;
9import org.semanticweb.HermiT.model.AtomicRole;
10import org.semanticweb.HermiT.model.DLClause;
11import org.semanticweb.HermiT.model.DLPredicate;
12import org.semanticweb.HermiT.model.Individual;
13import org.semanticweb.HermiT.model.Inequality;
14import org.semanticweb.HermiT.model.InverseRole;
15import org.semanticweb.HermiT.model.Term;
16import org.semanticweb.HermiT.model.Variable;
17
18import uk.ac.ox.cs.pagoda.MyPrefixes; 4import uk.ac.ox.cs.pagoda.MyPrefixes;
19import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; 5import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
20import uk.ac.ox.cs.pagoda.multistage.Normalisation; 6import uk.ac.ox.cs.pagoda.multistage.Normalisation;
21import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; 7import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
22import uk.ac.ox.cs.pagoda.rules.OverApproxExist;
23import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; 8import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram;
9import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
10
11import java.util.LinkedList;
24 12
25public class TrackingRuleEncoderDisj1 extends TrackingRuleEncoderDisj { 13public class TrackingRuleEncoderDisj1 extends TrackingRuleEncoderDisj {
26 14
diff --git a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj2.java b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj2.java
index 6cf239f..8d79090 100644
--- a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj2.java
+++ b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisj2.java
@@ -1,24 +1,12 @@
1package uk.ac.ox.cs.pagoda.tracking; 1package uk.ac.ox.cs.pagoda.tracking;
2 2
3import org.semanticweb.HermiT.model.AtLeastConcept; 3import org.semanticweb.HermiT.model.*;
4import org.semanticweb.HermiT.model.Atom;
5import org.semanticweb.HermiT.model.AtomicConcept;
6import org.semanticweb.HermiT.model.AtomicNegationConcept;
7import org.semanticweb.HermiT.model.AtomicRole;
8import org.semanticweb.HermiT.model.DLClause;
9import org.semanticweb.HermiT.model.DLPredicate;
10import org.semanticweb.HermiT.model.Individual;
11import org.semanticweb.HermiT.model.Inequality;
12import org.semanticweb.HermiT.model.InverseRole;
13import org.semanticweb.HermiT.model.Term;
14import org.semanticweb.HermiT.model.Variable;
15
16import uk.ac.ox.cs.pagoda.MyPrefixes; 4import uk.ac.ox.cs.pagoda.MyPrefixes;
17import uk.ac.ox.cs.pagoda.multistage.Normalisation; 5import uk.ac.ox.cs.pagoda.multistage.Normalisation;
18import uk.ac.ox.cs.pagoda.query.QueryRecord; 6import uk.ac.ox.cs.pagoda.query.QueryRecord;
19import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; 7import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
20import uk.ac.ox.cs.pagoda.rules.OverApproxExist;
21import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; 8import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram;
9import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
22 10
23public class TrackingRuleEncoderDisj2 extends TrackingRuleEncoderDisj { 11public class TrackingRuleEncoderDisj2 extends TrackingRuleEncoderDisj {
24 12
@@ -56,7 +44,7 @@ public class TrackingRuleEncoderDisj2 extends TrackingRuleEncoderDisj {
56 44
57 @Override 45 @Override
58 protected DLPredicate generateAuxiliaryRule(AtLeastConcept p, DLClause original, Individual[] individuals) { 46 protected DLPredicate generateAuxiliaryRule(AtLeastConcept p, DLClause original, Individual[] individuals) {
59 DLPredicate ret = AtomicConcept.create(getTrackingPredicate(Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p, individuals))); 47 DLPredicate ret = AtomicConcept.create(getTrackingPredicate(Normalisation.getAuxiliaryConcept4Disjunct(p, individuals)));
60 Atom[] headAtom = new Atom[] {Atom.create(ret, X)}; 48 Atom[] headAtom = new Atom[] {Atom.create(ret, X)};
61 49
62 AtomicRole role = p.getOnRole() instanceof AtomicRole ? 50 AtomicRole role = p.getOnRole() instanceof AtomicRole ?
diff --git a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar1.java b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar1.java
index 37116d4..d96c747 100644
--- a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar1.java
+++ b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar1.java
@@ -1,199 +1,188 @@
1package uk.ac.ox.cs.pagoda.tracking; 1package uk.ac.ox.cs.pagoda.tracking;
2 2
3import java.util.Collection; 3import org.semanticweb.HermiT.model.*;
4import java.util.HashSet;
5import java.util.LinkedList;
6import java.util.Set;
7
8import org.semanticweb.HermiT.model.AnnotatedEquality;
9import org.semanticweb.HermiT.model.AtLeast;
10import org.semanticweb.HermiT.model.AtLeastConcept;
11import org.semanticweb.HermiT.model.Atom;
12import org.semanticweb.HermiT.model.AtomicConcept;
13import org.semanticweb.HermiT.model.AtomicNegationConcept;
14import org.semanticweb.HermiT.model.AtomicRole;
15import org.semanticweb.HermiT.model.DLClause;
16import org.semanticweb.HermiT.model.DLPredicate;
17import org.semanticweb.HermiT.model.DatatypeRestriction;
18import org.semanticweb.HermiT.model.Equality;
19import org.semanticweb.HermiT.model.Inequality;
20import org.semanticweb.HermiT.model.InverseRole;
21import org.semanticweb.HermiT.model.Variable;
22
23import uk.ac.ox.cs.pagoda.MyPrefixes; 4import uk.ac.ox.cs.pagoda.MyPrefixes;
24import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; 5import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
25import uk.ac.ox.cs.pagoda.multistage.Normalisation; 6import uk.ac.ox.cs.pagoda.multistage.Normalisation;
26import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; 7import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
27import uk.ac.ox.cs.pagoda.rules.OverApproxExist;
28import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; 8import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram;
9import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
29import uk.ac.ox.cs.pagoda.util.Namespace; 10import uk.ac.ox.cs.pagoda.util.Namespace;
30import uk.ac.ox.cs.pagoda.util.Utility; 11import uk.ac.ox.cs.pagoda.util.Utility;
31 12
13import java.util.Collection;
14import java.util.HashSet;
15import java.util.LinkedList;
16import java.util.Set;
17
32public class TrackingRuleEncoderDisjVar1 extends TrackingRuleEncoderWithGap { 18public class TrackingRuleEncoderDisjVar1 extends TrackingRuleEncoderWithGap {
33 19
20 private Set<DLClause> disjunctiveRules = new HashSet<DLClause>();
21 private Variable X = Variable.create("X"), Y = Variable.create("Y");
22 private String bottomTrackingProgram = null;
23
34 public TrackingRuleEncoderDisjVar1(UpperDatalogProgram program, BasicQueryEngine store) { 24 public TrackingRuleEncoderDisjVar1(UpperDatalogProgram program, BasicQueryEngine store) {
35 super(program, store); 25 super(program, store);
36 } 26 }
37 27
38 private Set<DLClause> disjunctiveRules = new HashSet<DLClause>();
39
40 @Override 28 @Override
41 public boolean encodingRules() { 29 public boolean encodingRules() {
42 if (super.encodingRules()) { 30 if (super.encodingRules()) {
43 processDisjunctiveRules(); 31 processDisjunctiveRules();
44 return true; 32 return true;
45 } 33 }
46 return false; 34 return false;
47 } 35 }
48 36
49 @Override 37 @Override
50 protected void encodingRule(DLClause clause) { 38 protected void encodingRule(DLClause clause) {
51 if (currentQuery.isBottom()) { 39 if (currentQuery.isBottom()) {
52// super.encodingRule(clause); 40// super.encodingRule(clause);
53 encodingBottomQueryClause(clause); 41 encodingBottomQueryClause(clause);
54 return ; 42 return;
55 } 43 }
56 44
57 DLClause original = program.getCorrespondingClause(clause); 45 DLClause original = program.getCorrespondingClause(clause);
58 if (original.getHeadLength() <= 1) { 46 if (original.getHeadLength() <= 1) {
59 super.encodingRule(clause); 47 super.encodingRule(clause);
60 } 48 }
61 else { 49 else {
62 if (!DLClauseHelper.hasSubsetBodyAtoms(clause, original)) 50 if (!DLClauseHelper.hasSubsetBodyAtoms(clause, original))
63 super.encodingRule(clause); 51 super.encodingRule(clause);
64 addDisjunctiveRule(original); 52 addDisjunctiveRule(original);
65 } 53 }
66 54
67 } 55 }
68
69 56
70 private void processDisjunctiveRules() { 57 private void processDisjunctiveRules() {
71 for (DLClause clause: disjunctiveRules) 58 for (DLClause clause: disjunctiveRules)
72 encodingDisjunctiveRule(clause); 59 encodingDisjunctiveRule(clause);
73 } 60 }
74 61
75 private Atom getAuxiliaryAtom(Atom headAtom) { 62 private Atom getAuxiliaryAtom(Atom headAtom) {
76 DLPredicate p = headAtom.getDLPredicate(); 63 DLPredicate p = headAtom.getDLPredicate();
77 if (p instanceof AtLeast || p instanceof AtLeast) { 64 if (p instanceof AtLeast || p instanceof AtLeast) {
78 return Atom.create(generateAuxiliaryRule((AtLeast) p, true), headAtom.getArgument(0)); 65 return Atom.create(generateAuxiliaryRule((AtLeast) p, true), headAtom.getArgument(0));
79 } 66 }
80 if (p instanceof AtomicConcept) 67 if(p instanceof AtomicConcept)
81 return Atom.create(generateAuxiliaryRule((AtomicConcept) p), headAtom.getArgument(0)); 68 return Atom.create(generateAuxiliaryRule((AtomicConcept) p), headAtom.getArgument(0));
82 if (p instanceof AtomicRole) 69 if(p instanceof AtomicRole)
83 return Atom.create(generateAuxiliaryRule((AtomicRole) p), headAtom.getArgument(0), headAtom.getArgument(1)); 70 return Atom.create(generateAuxiliaryRule((AtomicRole) p), headAtom.getArgument(0), headAtom.getArgument(1));
84 if (p instanceof Equality || p instanceof AnnotatedEquality) 71 if(p instanceof Equality || p instanceof AnnotatedEquality)
85 return Atom.create(generateAuxiliaryRule(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1)); 72 return Atom.create(generateAuxiliaryRule(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1));
86 if (p instanceof Inequality) 73 if(p instanceof Inequality)
87 return Atom.create(generateAuxiliaryRule((Inequality) p), headAtom.getArgument(0), headAtom.getArgument(1)); 74 return Atom.create(generateAuxiliaryRule((Inequality) p), headAtom.getArgument(0), headAtom.getArgument(1));
88 75
89 return null; 76 return null;
90 } 77 }
91 78
92 private Atom getTrackingAtom(Atom headAtom) { 79 private Atom getTrackingAtom(Atom headAtom) {
93 DLPredicate p = headAtom.getDLPredicate(); 80 DLPredicate p = headAtom.getDLPredicate();
94 if (p instanceof AtLeast) { 81 if (p instanceof AtLeast) {
95 p = Normalisation.toAtLeastConcept((AtLeast) p); 82 p = Normalisation.toAtLeastConcept((AtLeast) p);
96 return Atom.create(getTrackingDLPredicate(AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p))), headAtom.getArgument(0)); 83 return Atom.create(getTrackingDLPredicate(AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p))), headAtom
84 .getArgument(0));
97 } 85 }
98 if (p instanceof AtomicConcept) 86 if(p instanceof AtomicConcept)
99 return Atom.create(getTrackingDLPredicate((AtomicConcept) p), headAtom.getArgument(0)); 87 return Atom.create(getTrackingDLPredicate(p), headAtom.getArgument(0));
100 if (p instanceof AtomicRole) 88 if(p instanceof AtomicRole)
101 return Atom.create(getTrackingDLPredicate((AtomicRole) p), headAtom.getArgument(0), headAtom.getArgument(1)); 89 return Atom.create(getTrackingDLPredicate(p), headAtom.getArgument(0), headAtom.getArgument(1));
102 if (p instanceof Equality || p instanceof AnnotatedEquality) 90 if(p instanceof Equality || p instanceof AnnotatedEquality)
103 return Atom.create(getTrackingDLPredicate(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1)); 91 return Atom.create(getTrackingDLPredicate(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1));
104 if (p instanceof Inequality) 92 if(p instanceof Inequality)
105 return Atom.create(getTrackingDLPredicate((Inequality) p), headAtom.getArgument(0), headAtom.getArgument(1)); 93 return Atom.create(getTrackingDLPredicate(p), headAtom.getArgument(0), headAtom.getArgument(1));
106 94
107 return null; 95 return null;
108 } 96 }
109 97
110 private Atom getGapAtom(Atom headAtom) { 98 private Atom getGapAtom(Atom headAtom) {
111 DLPredicate p = headAtom.getDLPredicate(); 99 DLPredicate p = headAtom.getDLPredicate();
112 if (p instanceof AtLeast) { 100 if (p instanceof AtLeast) {
113 p = Normalisation.toAtLeastConcept((AtLeast) p); 101 p = Normalisation.toAtLeastConcept((AtLeast) p);
114 return Atom.create(getGapDLPredicate(AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p))), headAtom.getArgument(0)); 102 return Atom.create(getGapDLPredicate(AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p))), headAtom
103 .getArgument(0));
115 } 104 }
116 if (p instanceof AtomicConcept) 105 if(p instanceof AtomicConcept)
117 return Atom.create(getGapDLPredicate((AtomicConcept) p), headAtom.getArgument(0)); 106 return Atom.create(getGapDLPredicate(p), headAtom.getArgument(0));
118 if (p instanceof AtomicRole) 107 if(p instanceof AtomicRole)
119 return Atom.create(getGapDLPredicate((AtomicRole) p), headAtom.getArgument(0), headAtom.getArgument(1)); 108 return Atom.create(getGapDLPredicate(p), headAtom.getArgument(0), headAtom.getArgument(1));
120 if (p instanceof Equality || p instanceof AnnotatedEquality) 109 if(p instanceof Equality || p instanceof AnnotatedEquality)
121 return Atom.create(getGapDLPredicate(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1)); 110 return Atom.create(getGapDLPredicate(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1));
122 if (p instanceof Inequality) 111 if(p instanceof Inequality)
123 return Atom.create(getGapDLPredicate((Inequality) p), headAtom.getArgument(0), headAtom.getArgument(1)); 112 return Atom.create(getGapDLPredicate(p), headAtom.getArgument(0), headAtom.getArgument(1));
124 if (p instanceof DatatypeRestriction) 113 if (p instanceof DatatypeRestriction)
125 return Atom.create(getGapDLPredicate((DatatypeRestriction) p), headAtom.getArgument(0)); 114 return Atom.create(getGapDLPredicate(p), headAtom.getArgument(0));
126 Utility.logError(p + " is not recognised."); 115 Utility.logError(p + " is not recognised.");
127 return null; 116 return null;
128 } 117 }
129 118
130 private void encodingDisjunctiveRule(DLClause clause) { 119 private void encodingDisjunctiveRule(DLClause clause) {
131 int headLength = clause.getHeadLength(); 120 int headLength = clause.getHeadLength();
132 121
133 Atom[] auxAtoms = new Atom[headLength]; 122 Atom[] auxAtoms = new Atom[headLength];
134 for (int i = 0; i < headLength; ++i) 123 for (int i = 0; i < headLength; ++i)
135 auxAtoms[i] = getAuxiliaryAtom(clause.getHeadAtom(i)); 124 auxAtoms[i] = getAuxiliaryAtom(clause.getHeadAtom(i));
136 125
137 Atom[] trackingAtoms = new Atom[headLength]; 126 Atom[] trackingAtoms = new Atom[headLength];
138 for (int i = 0; i < headLength; ++i) 127 for (int i = 0; i < headLength; ++i)
139 trackingAtoms[i] = getTrackingAtom(clause.getHeadAtom(i)); 128 trackingAtoms[i] = getTrackingAtom(clause.getHeadAtom(i));
140 129
141 Atom[] gapAtoms = new Atom[headLength]; 130 Atom[] gapAtoms = new Atom[headLength];
142 for (int i = 0; i < headLength; ++i) 131 for (int i = 0; i < headLength; ++i)
143 gapAtoms[i] = getGapAtom(clause.getHeadAtom(i)); 132 gapAtoms[i] = getGapAtom(clause.getHeadAtom(i));
144 133
145 Atom[] bodyAtoms = clause.getBodyAtoms(); 134 Atom[] bodyAtoms = clause.getBodyAtoms();
146 135
147 LinkedList<Atom> newHeadAtoms = new LinkedList<Atom>(); 136 LinkedList<Atom> newHeadAtoms = new LinkedList<Atom>();
148 DLPredicate selected = AtomicConcept.create(getSelectedPredicate()); 137 DLPredicate selected = AtomicConcept.create(getSelectedPredicate());
149 newHeadAtoms.add(Atom.create(selected, getIndividual4GeneralRule(clause))); 138 newHeadAtoms.add(Atom.create(selected, getIndividual4GeneralRule(clause)));
150 139
151 for (Atom atom: bodyAtoms) { 140 for (Atom atom: bodyAtoms) {
152 Atom newAtom = Atom.create( 141 Atom newAtom = Atom.create(
153 getTrackingDLPredicate(atom.getDLPredicate()), 142 getTrackingDLPredicate(atom.getDLPredicate()),
154 DLClauseHelper.getArguments(atom)); 143 DLClauseHelper.getArguments(atom));
155 newHeadAtoms.add(newAtom); 144 newHeadAtoms.add(newAtom);
156 } 145 }
157 146
158 DLClause newClause; 147 DLClause newClause;
159 int index; 148 int index;
160 for (int j = 0; j < headLength; ++j) { 149 for (int j = 0; j < headLength; ++j) {
161 Atom[] newBodyAtoms = new Atom[headLength * 2 + bodyAtoms.length]; 150 Atom[] newBodyAtoms = new Atom[headLength * 2 + bodyAtoms.length];
162 index = 0; 151 index = 0;
163 for (int i = 0; i < headLength; ++i, ++index) 152 for (int i = 0; i < headLength; ++i, ++index)
164 newBodyAtoms[index] = gapAtoms[i]; 153 newBodyAtoms[index] = gapAtoms[i];
165 for (int i = 0; i < headLength; ++i, ++index) 154 for (int i = 0; i < headLength; ++i, ++index)
166 if (i != j) 155 if (i != j)
167 newBodyAtoms[index] = auxAtoms[i]; 156 newBodyAtoms[index] = auxAtoms[i];
168 else 157 else
169 newBodyAtoms[index] = trackingAtoms[i]; 158 newBodyAtoms[index] = trackingAtoms[i];
170 159
171 for (int i = 0; i < bodyAtoms.length; ++i, ++index) 160 for (int i = 0; i < bodyAtoms.length; ++i, ++index)
172 newBodyAtoms[index] = bodyAtoms[i]; 161 newBodyAtoms[index] = bodyAtoms[i];
173 162
174 for (Atom atom: newHeadAtoms) { 163 for (Atom atom: newHeadAtoms) {
175 newClause = DLClause.create(new Atom[] {atom}, newBodyAtoms); 164 newClause = DLClause.create(new Atom[]{atom}, newBodyAtoms);
176 addTrackingClause(newClause); 165 addTrackingClause(newClause);
177 } 166 }
178 } 167 }
179 } 168 }
180 169
181 private void addTrackingClause(DLClause clause) { 170 private void addTrackingClause(DLClause clause) {
182 trackingClauses.add(clause); 171 trackingClauses.add(clause);
183 } 172 }
184 173
185 private void addDisjunctiveRule(DLClause clause) { 174 private void addDisjunctiveRule(DLClause clause) {
186 disjunctiveRules.add(clause); 175 disjunctiveRules.add(clause);
187 } 176 }
188 177
189 private DLPredicate getAuxPredicate(DLPredicate p) { 178 private DLPredicate getAuxPredicate(DLPredicate p) {
190 if (p instanceof AtLeastConcept) { 179 if (p instanceof AtLeastConcept) {
191 StringBuilder builder = new StringBuilder( 180 StringBuilder builder = new StringBuilder(
192 Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p)); 181 Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p));
193 builder.append("_AUXa").append(currentQuery.getQueryID()); 182 builder.append("_AUXa").append(currentQuery.getQueryID());
194 return AtomicConcept.create(builder.toString()); 183 return AtomicConcept.create(builder.toString());
195 } 184 }
196 185
197 return getDLPredicate(p, "_AUXa" + currentQuery.getQueryID()); 186 return getDLPredicate(p, "_AUXa" + currentQuery.getQueryID());
198 } 187 }
199 188
@@ -202,120 +191,118 @@ public class TrackingRuleEncoderDisjVar1 extends TrackingRuleEncoderWithGap {
202 } 191 }
203 192
204 private DLPredicate generateAuxiliaryRule(AtLeast p1, boolean withAux) { 193 private DLPredicate generateAuxiliaryRule(AtLeast p1, boolean withAux) {
205 AtLeastConcept p = Normalisation.toAtLeastConcept(p1); 194 AtLeastConcept p = Normalisation.toAtLeastConcept(p1);
206 195
207 int num = p.getNumber(); 196 int num = p.getNumber();
208 Variable[] Ys = new Variable[num]; 197 Variable[] Ys = new Variable[num];
209 if (num > 1) 198 if (num > 1)
210 for (int i = 0; i < num; ++i) 199 for(int i = 0; i < num; ++i)
211 Ys[i] = Variable.create("Y" + (i + 1)); 200 Ys[i] = Variable.create("Y" + (i + 1));
212 else 201 else
213 Ys[0] = Y; 202 Ys[0] = Y;
214 203
215 Collection<Atom> expandedAtom = new LinkedList<Atom>(); 204 Collection<Atom> expandedAtom = new LinkedList<Atom>();
216 Collection<Atom> representativeAtom = new LinkedList<Atom>(); 205 Collection<Atom> representativeAtom = new LinkedList<Atom>();
217 if (p.getOnRole() instanceof AtomicRole) { 206 if (p.getOnRole() instanceof AtomicRole) {
218 AtomicRole r = (AtomicRole) p.getOnRole(); 207 AtomicRole r = (AtomicRole) p.getOnRole();
219 for (int i = 0; i < num; ++i) 208 for(int i = 0; i < num; ++i)
220 expandedAtom.add(Atom.create(r, X, Ys[i])); 209 expandedAtom.add(Atom.create(r, X, Ys[i]));
221 representativeAtom.add(Atom.create(r, X, Ys[0])); 210 representativeAtom.add(Atom.create(r, X, Ys[0]));
222 } 211 }
223 else { 212 else {
224 AtomicRole r = ((InverseRole) p.getOnRole()).getInverseOf(); 213 AtomicRole r = ((InverseRole) p.getOnRole()).getInverseOf();
225 for (int i = 0; i < num; ++i) 214 for(int i = 0; i < num; ++i)
226 expandedAtom.add(Atom.create(r, Ys[i], X)); 215 expandedAtom.add(Atom.create(r, Ys[i], X));
227 representativeAtom.add(Atom.create(r, Ys[0], X)); 216 representativeAtom.add(Atom.create(r, Ys[0], X));
228 } 217 }
229 218
230 if (num > 1) { 219 if (num > 1) {
231 representativeAtom.add(Atom.create(Inequality.INSTANCE, Ys[0], Ys[1])); 220 representativeAtom.add(Atom.create(Inequality.INSTANCE, Ys[0], Ys[1]));
232 } 221 }
233 for (int i = 0; i < num; ++i) 222 for (int i = 0; i < num; ++i)
234 for (int j = i + 1; j < num; ++j) 223 for (int j = i + 1; j < num; ++j)
235 expandedAtom.add(Atom.create(Inequality.INSTANCE, Ys[i], Ys[j])); 224 expandedAtom.add(Atom.create(Inequality.INSTANCE, Ys[i], Ys[j]));
236 225
237 if (!p.getToConcept().equals(AtomicConcept.THING)) { 226 if (!p.getToConcept().equals(AtomicConcept.THING)) {
238 AtomicConcept c; 227 AtomicConcept c;
239 if (p.getToConcept() instanceof AtomicConcept) 228 if(p.getToConcept() instanceof AtomicConcept)
240 c = (AtomicConcept) p.getToConcept(); 229 c = (AtomicConcept) p.getToConcept();
241 else { 230 else {
242 c = OverApproxExist.getNegationConcept(((AtomicNegationConcept) p.getToConcept()).getNegatedAtomicConcept()); 231 c = OverApproxExist.getNegationConcept(((AtomicNegationConcept) p.getToConcept()).getNegatedAtomicConcept());
243 } 232 }
244 for (int i = 0; i < num; ++i) 233 for (int i = 0; i < num; ++i)
245 expandedAtom.add(Atom.create(c, Ys[i])); 234 expandedAtom.add(Atom.create(c, Ys[i]));
246 representativeAtom.add(Atom.create(c, Ys[0])); 235 representativeAtom.add(Atom.create(c, Ys[0]));
247 } 236 }
248 237
249 AtomicConcept ac = AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct(p)); 238 AtomicConcept ac = AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct(p));
250 DLPredicate trackingPredicate = getTrackingDLPredicate(ac); 239 DLPredicate trackingPredicate = getTrackingDLPredicate(ac);
251 DLPredicate gapPredicate = getGapDLPredicate(ac); 240 DLPredicate gapPredicate = getGapDLPredicate(ac);
252 DLPredicate auxPredicate = withAux ? getAuxPredicate(p) : null; 241 DLPredicate auxPredicate = withAux ? getAuxPredicate(p) : null;
253 242
254 for (Atom atom: representativeAtom) { 243 for (Atom atom: representativeAtom) {
255 Atom[] bodyAtoms = new Atom[expandedAtom.size() + 1]; 244 Atom[] bodyAtoms = new Atom[expandedAtom.size() + 1];
256 if (atom.getArity() == 1) 245 if (atom.getArity() == 1)
257 bodyAtoms[0] = Atom.create(getTrackingDLPredicate(atom.getDLPredicate()), atom.getArgument(0)); 246 bodyAtoms[0] = Atom.create(getTrackingDLPredicate(atom.getDLPredicate()), atom.getArgument(0));
258 else 247 else
259 bodyAtoms[0] = Atom.create(getTrackingDLPredicate(atom.getDLPredicate()), atom.getArgument(0), atom.getArgument(1)); 248 bodyAtoms[0] = Atom.create(getTrackingDLPredicate(atom.getDLPredicate()), atom.getArgument(0), atom.getArgument(1));
260 int i = 0; 249 int i = 0;
261 for (Atom bodyAtom: expandedAtom) 250 for (Atom bodyAtom: expandedAtom)
262 bodyAtoms[++i] = bodyAtom; 251 bodyAtoms[++i] = bodyAtom;
263 addTrackingClause(DLClause.create(new Atom[] {Atom.create(trackingPredicate, X)}, bodyAtoms)); 252 addTrackingClause(DLClause.create(new Atom[] {Atom.create(trackingPredicate, X)}, bodyAtoms));
264 253
265 bodyAtoms = new Atom[expandedAtom.size() + 1]; 254 bodyAtoms = new Atom[expandedAtom.size() + 1];
266 if (atom.getArity() == 1) 255 if (atom.getArity() == 1)
267 bodyAtoms[0] = Atom.create(getGapDLPredicate(atom.getDLPredicate()), atom.getArgument(0)); 256 bodyAtoms[0] = Atom.create(getGapDLPredicate(atom.getDLPredicate()), atom.getArgument(0));
268 else 257 else
269 bodyAtoms[0] = Atom.create(getGapDLPredicate(atom.getDLPredicate()), atom.getArgument(0), atom.getArgument(1)); 258 bodyAtoms[0] = Atom.create(getGapDLPredicate(atom.getDLPredicate()), atom.getArgument(0), atom.getArgument(1));
270 i = 0; 259 i = 0;
271 for (Atom bodyAtom: expandedAtom) 260 for (Atom bodyAtom: expandedAtom)
272 bodyAtoms[++i] = bodyAtom; 261 bodyAtoms[++i] = bodyAtom;
273 addTrackingClause(DLClause.create(new Atom[] {Atom.create(gapPredicate, X)}, bodyAtoms)); 262 addTrackingClause(DLClause.create(new Atom[] {Atom.create(gapPredicate, X)}, bodyAtoms));
274 263
275 if (withAux) { 264 if (withAux) {
276 bodyAtoms = new Atom[expandedAtom.size() + 1]; 265 bodyAtoms = new Atom[expandedAtom.size() + 1];
277 bodyAtoms[0] = getAuxiliaryAtom(atom); 266 bodyAtoms[0] = getAuxiliaryAtom(atom);
278 i = 0; 267 i = 0;
279 for (Atom bodyAtom: expandedAtom) 268 for (Atom bodyAtom: expandedAtom)
280 bodyAtoms[++i] = bodyAtom; 269 bodyAtoms[++i] = bodyAtom;
281 addTrackingClause(DLClause.create(new Atom[] {Atom.create(auxPredicate, X)}, bodyAtoms)); 270 addTrackingClause(DLClause.create(new Atom[] {Atom.create(auxPredicate, X)}, bodyAtoms));
282 } 271 }
283 } 272 }
284 273
285 return withAux ? auxPredicate : trackingPredicate; 274 return withAux ? auxPredicate : trackingPredicate;
286 } 275 }
287 276
288 private DLPredicate generateAuxiliaryRule(AtomicRole p) { 277 private DLPredicate generateAuxiliaryRule(AtomicRole p) {
289 if (currentQuery.isBottom()) 278 if(currentQuery.isBottom())
290 return getTrackingDLPredicate(p); 279 return getTrackingDLPredicate(p);
291 280
292 DLPredicate ret = getAuxPredicate(p); 281 DLPredicate ret = getAuxPredicate(p);
293 Atom[] headAtom = new Atom[] {Atom.create(ret, X, Y)}; 282 Atom[] headAtom = new Atom[] {Atom.create(ret, X, Y)};
294 283
295 addTrackingClause( 284 addTrackingClause(
296 DLClause.create(headAtom, new Atom[] {Atom.create(getTrackingDLPredicate(p), X, Y)})); 285 DLClause.create(headAtom, new Atom[]{Atom.create(getTrackingDLPredicate(p), X, Y)}));
297 addTrackingClause( 286 addTrackingClause(
298 DLClause.create(headAtom, new Atom[] {Atom.create(getTrackingBottomDLPredicate(p), X, Y)})); 287 DLClause.create(headAtom, new Atom[]{Atom.create(getTrackingBottomDLPredicate(p), X, Y)}));
299 288
300 return ret; 289 return ret;
301 } 290 }
302
303 private Variable X = Variable.create("X"), Y = Variable.create("Y");
304 291
305 private DLPredicate generateAuxiliaryRule(AtomicConcept p) { 292 private DLPredicate generateAuxiliaryRule(AtomicConcept p) {
306 if (currentQuery.isBottom()) 293 if (currentQuery.isBottom())
307 return getTrackingDLPredicate(p); 294 return getTrackingDLPredicate(p);
308 295
309 DLPredicate ret = getAuxPredicate(p); 296 DLPredicate ret = getAuxPredicate(p);
310 Atom[] headAtom = new Atom[] {Atom.create(ret, X)}; 297 Atom[] headAtom = new Atom[]{Atom.create(ret, X)};
311 addTrackingClause( 298 addTrackingClause(
312 DLClause.create(headAtom, 299 DLClause.create(headAtom,
313 new Atom[] { Atom.create(getTrackingDLPredicate(p), X)})); 300 new Atom[]{Atom.create(getTrackingDLPredicate(p), X)}));
314 addTrackingClause( 301 addTrackingClause(
315 DLClause.create(headAtom, 302 DLClause.create(headAtom,
316 new Atom[] { Atom.create(getTrackingBottomDLPredicate(p), X)})); 303 new Atom[] { Atom.create(getTrackingBottomDLPredicate(p), X)}));
317 304
318 return ret; 305 return ret;
319 } 306 }
320 307
321 private DLPredicate generateAuxiliaryRule(Equality instance) { 308 private DLPredicate generateAuxiliaryRule(Equality instance) {
@@ -323,20 +310,18 @@ public class TrackingRuleEncoderDisjVar1 extends TrackingRuleEncoderWithGap {
323 } 310 }
324 311
325 private DLPredicate generateAuxiliaryRule(Inequality instance) { 312 private DLPredicate generateAuxiliaryRule(Inequality instance) {
326 return generateAuxiliaryRule(AtomicRole.create(Namespace.INEQUALITY)); 313 return generateAuxiliaryRule(AtomicRole.create(Namespace.INEQUALITY));
327 } 314 }
328 315
329 @Override 316 @Override
330 public String getTrackingProgram() { 317 public String getTrackingProgram() {
331 StringBuilder sb = getTrackingProgramBody(); 318 StringBuilder sb = getTrackingProgramBody();
332 if (currentQuery.isBottom()) 319 if (currentQuery.isBottom())
333 sb.append(getBottomTrackingProgram()); 320 sb.append(getBottomTrackingProgram());
334 sb.insert(0, MyPrefixes.PAGOdAPrefixes.prefixesText()); 321 sb.insert(0, MyPrefixes.PAGOdAPrefixes.prefixesText());
335 return sb.toString(); 322 return sb.toString();
336 } 323 }
337 324
338 private String bottomTrackingProgram = null;
339
340 private String getBottomTrackingProgram() { 325 private String getBottomTrackingProgram() {
341 if (bottomTrackingProgram != null) return bottomTrackingProgram.replace("_tn", getTrackingPredicate("")); 326 if (bottomTrackingProgram != null) return bottomTrackingProgram.replace("_tn", getTrackingPredicate(""));
342 327
@@ -364,7 +349,7 @@ public class TrackingRuleEncoderDisjVar1 extends TrackingRuleEncoderWithGap {
364// for (Atom tAtom: clause.getHeadAtoms()) { 349// for (Atom tAtom: clause.getHeadAtoms()) {
365// for (int i = 0; i < tAtom.getArity(); ++i) 350// for (int i = 0; i < tAtom.getArity(); ++i)
366// if ((t = tAtom.getArgument(i)) instanceof Individual) 351// if ((t = tAtom.getArgument(i)) instanceof Individual)
367// if (((Individual) t).getIRI().startsWith(OverApproxExist.skolemisedIndividualPrefix)) 352// if (((Individual) t).getIRI().startsWith(OverApproxExist.SKOLEMISED_INDIVIDUAL_PREFIX))
368// clause = program.getCorrespondingClause(clause); 353// clause = program.getCorrespondingClause(clause);
369// } 354// }
370 355
diff --git a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar2.java b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar2.java
index d257de3..7311a86 100644
--- a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar2.java
+++ b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderDisjVar2.java
@@ -1,31 +1,19 @@
1package uk.ac.ox.cs.pagoda.tracking; 1package uk.ac.ox.cs.pagoda.tracking;
2 2
3import java.util.Collection; 3import org.semanticweb.HermiT.model.*;
4import java.util.HashSet;
5import java.util.LinkedList;
6import java.util.Set;
7
8import org.semanticweb.HermiT.model.AnnotatedEquality;
9import org.semanticweb.HermiT.model.AtLeastConcept;
10import org.semanticweb.HermiT.model.Atom;
11import org.semanticweb.HermiT.model.AtomicConcept;
12import org.semanticweb.HermiT.model.AtomicNegationConcept;
13import org.semanticweb.HermiT.model.AtomicRole;
14import org.semanticweb.HermiT.model.DLClause;
15import org.semanticweb.HermiT.model.DLPredicate;
16import org.semanticweb.HermiT.model.Equality;
17import org.semanticweb.HermiT.model.Inequality;
18import org.semanticweb.HermiT.model.InverseRole;
19import org.semanticweb.HermiT.model.Variable;
20
21import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; 4import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
22import uk.ac.ox.cs.pagoda.multistage.Normalisation; 5import uk.ac.ox.cs.pagoda.multistage.Normalisation;
23import uk.ac.ox.cs.pagoda.query.QueryRecord; 6import uk.ac.ox.cs.pagoda.query.QueryRecord;
24import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; 7import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
25import uk.ac.ox.cs.pagoda.rules.OverApproxExist;
26import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; 8import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram;
9import uk.ac.ox.cs.pagoda.rules.approximators.OverApproxExist;
27import uk.ac.ox.cs.pagoda.util.Namespace; 10import uk.ac.ox.cs.pagoda.util.Namespace;
28 11
12import java.util.Collection;
13import java.util.HashSet;
14import java.util.LinkedList;
15import java.util.Set;
16
29public class TrackingRuleEncoderDisjVar2 extends TrackingRuleEncoderWithGap { 17public class TrackingRuleEncoderDisjVar2 extends TrackingRuleEncoderWithGap {
30 18
31 public TrackingRuleEncoderDisjVar2(UpperDatalogProgram program, BasicQueryEngine store) { 19 public TrackingRuleEncoderDisjVar2(UpperDatalogProgram program, BasicQueryEngine store) {
@@ -91,13 +79,13 @@ public class TrackingRuleEncoderDisjVar2 extends TrackingRuleEncoderWithGap {
91 return Atom.create(getGapDLPredicate(AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p))), headAtom.getArgument(0)); 79 return Atom.create(getGapDLPredicate(AtomicConcept.create(Normalisation.getAuxiliaryConcept4Disjunct((AtLeastConcept) p))), headAtom.getArgument(0));
92 } 80 }
93 if (p instanceof AtomicConcept) 81 if (p instanceof AtomicConcept)
94 return Atom.create(getGapDLPredicate((AtomicConcept) p), headAtom.getArgument(0)); 82 return Atom.create(getGapDLPredicate(p), headAtom.getArgument(0));
95 if (p instanceof AtomicRole) 83 if (p instanceof AtomicRole)
96 return Atom.create(getGapDLPredicate((AtomicRole) p), headAtom.getArgument(0), headAtom.getArgument(1)); 84 return Atom.create(getGapDLPredicate(p), headAtom.getArgument(0), headAtom.getArgument(1));
97 if (p instanceof Equality || p instanceof AnnotatedEquality) 85 if (p instanceof Equality || p instanceof AnnotatedEquality)
98 return Atom.create(getGapDLPredicate(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1)); 86 return Atom.create(getGapDLPredicate(Equality.INSTANCE), headAtom.getArgument(0), headAtom.getArgument(1));
99 if (p instanceof Inequality) 87 if (p instanceof Inequality)
100 return Atom.create(getGapDLPredicate((Inequality) p), headAtom.getArgument(0), headAtom.getArgument(1)); 88 return Atom.create(getGapDLPredicate(p), headAtom.getArgument(0), headAtom.getArgument(1));
101 89
102 return null; 90 return null;
103 } 91 }
diff --git a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderWithGap.java b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderWithGap.java
index 99028ca..c3694ad 100644
--- a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderWithGap.java
+++ b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderWithGap.java
@@ -1,28 +1,21 @@
1package uk.ac.ox.cs.pagoda.tracking; 1package uk.ac.ox.cs.pagoda.tracking;
2 2
3import java.util.Collection; 3import org.semanticweb.HermiT.model.*;
4import java.util.HashSet;
5import java.util.LinkedList;
6import java.util.Set;
7
8import org.semanticweb.HermiT.model.AnnotatedEquality;
9import org.semanticweb.HermiT.model.Atom;
10import org.semanticweb.HermiT.model.AtomicConcept;
11import org.semanticweb.HermiT.model.AtomicRole;
12import org.semanticweb.HermiT.model.DLClause;
13import org.semanticweb.HermiT.model.Equality;
14import org.semanticweb.HermiT.model.Variable;
15import org.semanticweb.owlapi.model.IRI; 4import org.semanticweb.owlapi.model.IRI;
16import org.semanticweb.owlapi.model.OWLClass; 5import org.semanticweb.owlapi.model.OWLClass;
17import org.semanticweb.owlapi.model.OWLObjectProperty; 6import org.semanticweb.owlapi.model.OWLObjectProperty;
18import org.semanticweb.owlapi.model.OWLOntology; 7import org.semanticweb.owlapi.model.OWLOntology;
19
20import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; 8import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper;
21import uk.ac.ox.cs.pagoda.query.*; 9import uk.ac.ox.cs.pagoda.query.GapTupleIterator;
22import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; 10import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
23import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; 11import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram;
24import uk.ac.ox.cs.pagoda.util.Namespace; 12import uk.ac.ox.cs.pagoda.util.Namespace;
25 13
14import java.util.Collection;
15import java.util.HashSet;
16import java.util.LinkedList;
17import java.util.Set;
18
26public class TrackingRuleEncoderWithGap extends TrackingRuleEncoder { 19public class TrackingRuleEncoderWithGap extends TrackingRuleEncoder {
27 20
28 public TrackingRuleEncoderWithGap(UpperDatalogProgram program, BasicQueryEngine store) { 21 public TrackingRuleEncoderWithGap(UpperDatalogProgram program, BasicQueryEngine store) {
@@ -37,9 +30,9 @@ public class TrackingRuleEncoderWithGap extends TrackingRuleEncoder {
37 Variable X = Variable.create("X"); 30 Variable X = Variable.create("X");
38 AtomicRole trackingSameAs = AtomicRole.create(Namespace.EQUALITY + "_tn"); 31 AtomicRole trackingSameAs = AtomicRole.create(Namespace.EQUALITY + "_tn");
39 OWLOntology onto = program.getOntology(); 32 OWLOntology onto = program.getOntology();
40 Atom[] headAtom = new Atom[] {Atom.create(trackingSameAs, X, X)}, bodyAtom; 33 Atom[] headAtom = new Atom[]{Atom.create(trackingSameAs, X, X)}, bodyAtom;
41 for (OWLOntology o: onto.getImportsClosure()) 34 for(OWLOntology o : onto.getImportsClosure())
42 for (OWLClass cls: o.getClassesInSignature()) { 35 for(OWLClass cls : o.getClassesInSignature()) {
43 String clsIRI = cls.getIRI().toString(); 36 String clsIRI = cls.getIRI().toString();
44 unaryPredicates.add(clsIRI); 37 unaryPredicates.add(clsIRI);
45 bodyAtom = new Atom[] { 38 bodyAtom = new Atom[] {
@@ -47,14 +40,16 @@ public class TrackingRuleEncoderWithGap extends TrackingRuleEncoder {
47 Atom.create(AtomicConcept.create(GapTupleIterator.getGapPredicate(clsIRI)), X)}; 40 Atom.create(AtomicConcept.create(GapTupleIterator.getGapPredicate(clsIRI)), X)};
48 equalityRelatedClauses.add(DLClause.create(headAtom, bodyAtom)); 41 equalityRelatedClauses.add(DLClause.create(headAtom, bodyAtom));
49 } 42 }
50 43
51 Variable Y = Variable.create("Y"); 44 Variable Y = Variable.create("Y");
52 Set<OWLObjectProperty> setOfProperties = new HashSet<OWLObjectProperty>(); 45 Set<OWLObjectProperty> setOfProperties = new HashSet<OWLObjectProperty>();
53 for (OWLOntology o: onto.getImportsClosure()) 46 for(OWLOntology o : onto.getImportsClosure())
54 for (OWLObjectProperty prop: o.getObjectPropertiesInSignature()) 47 for(OWLObjectProperty prop : o.getObjectPropertiesInSignature())
55 setOfProperties.add(prop); 48 setOfProperties.add(prop);
56 setOfProperties.add(onto.getOWLOntologyManager().getOWLDataFactory().getOWLObjectProperty(IRI.create(Namespace.INEQUALITY))); 49 setOfProperties.add(onto.getOWLOntologyManager()
57 for (OWLObjectProperty prop: setOfProperties) { 50 .getOWLDataFactory()
51 .getOWLObjectProperty(IRI.create(Namespace.INEQUALITY)));
52 for(OWLObjectProperty prop : setOfProperties) {
58 String propIRI = prop.getIRI().toString(); 53 String propIRI = prop.getIRI().toString();
59 binaryPredicates.add(propIRI); 54 binaryPredicates.add(propIRI);
60 AtomicRole trackingRole = AtomicRole.create(propIRI + "_tn"); 55 AtomicRole trackingRole = AtomicRole.create(propIRI + "_tn");
diff --git a/src/uk/ac/ox/cs/pagoda/util/PagodaProperties.java b/src/uk/ac/ox/cs/pagoda/util/PagodaProperties.java
new file mode 100644
index 0000000..be6627a
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/util/PagodaProperties.java
@@ -0,0 +1,126 @@
1package uk.ac.ox.cs.pagoda.util;
2
3import java.io.FileInputStream;
4import java.io.IOException;
5import java.io.InputStream;
6import java.util.Properties;
7
8public class PagodaProperties {
9
10 public static final String CONFIG_FILE = "pagoda.properties";
11
12 public static final boolean DEFAULT_DEBUG = false;
13 public static boolean shellModeDefault = false;
14 private static boolean debug = DEFAULT_DEBUG;
15
16 static {
17 try(InputStream in = PagodaProperties.class.getClassLoader().getResourceAsStream(CONFIG_FILE)) {
18 Properties config = new Properties();
19 config.load(in);
20 in.close();
21 if(config.containsKey("debug")) {
22 debug = Boolean.parseBoolean(config.getProperty("debug"));
23 }
24 } catch(IOException e) {
25 e.printStackTrace();
26 }
27 }
28
29 String dataPath = null;
30 String ontologyPath;
31 String queryPath = null;
32 String answerPath = null;
33 boolean toClassify = true;
34 boolean toCallHermiT = true;
35 boolean shellMode = shellModeDefault;
36
37 public PagodaProperties(String path) {
38 java.util.Properties m_properties = new java.util.Properties();
39 InputStream inputStream = null;
40 try {
41 inputStream = new FileInputStream(path);
42 m_properties.load(inputStream);
43
44 setOntologyPath(m_properties.getProperty("ONTOLOGY"));
45 setDataPath(m_properties.getProperty("DATA"));
46 setQueryPath(m_properties.getProperty("QUERY"));
47 setAnswerPath(m_properties.getProperty("ANSWER"));
48 setToClassify(Boolean.parseBoolean(m_properties.getProperty("TO_CLASSIFY")));
49 setToCallHermiT(Boolean.parseBoolean(m_properties.getProperty("CALL_HERMIT")));
50
51 } catch (IOException e) {
52 e.printStackTrace();
53 } finally {
54 if (inputStream != null)
55 try {
56 inputStream.close();
57 } catch (IOException e) {
58 e.printStackTrace();
59 }
60 }
61 }
62
63 public PagodaProperties() {
64 }
65
66 public static boolean isDebuggingMode() {
67 return debug;
68 }
69
70 public String getDataPath() {
71 return dataPath;
72 }
73
74 public void setDataPath(String path) {
75 dataPath = path;
76 }
77
78 public String getOntologyPath() {
79 return ontologyPath;
80 }
81
82 public void setOntologyPath(String path) {
83 ontologyPath = path;
84 }
85
86 public String getQueryPath() {
87 return queryPath;
88 }
89
90 public void setQueryPath(String path) {
91 queryPath = path;
92 }
93
94 public String getAnswerPath() {
95 return answerPath;
96 }
97
98 public void setAnswerPath(String path) {
99 answerPath = path;
100 }
101
102 public boolean getToClassify() {
103 return toClassify;
104 }
105
106 public void setToClassify(boolean flag) {
107 toClassify = flag;
108 }
109
110 public boolean getToCallHermiT() {
111 return toCallHermiT;
112 }
113
114 public void setToCallHermiT(boolean flag) {
115 toCallHermiT = flag;
116 }
117
118 public boolean getShellMode() {
119 return shellMode;
120 }
121
122 public void setShellMode(boolean flag) {
123 shellMode = flag;
124 }
125
126}
diff --git a/src/uk/ac/ox/cs/pagoda/util/Properties.java b/src/uk/ac/ox/cs/pagoda/util/Properties.java
deleted file mode 100644
index b687b53..0000000
--- a/src/uk/ac/ox/cs/pagoda/util/Properties.java
+++ /dev/null
@@ -1,66 +0,0 @@
1package uk.ac.ox.cs.pagoda.util;
2
3import java.io.FileInputStream;
4import java.io.IOException;
5import java.io.InputStream;
6
7public class Properties {
8
9 String dataPath = null;
10 public String getDataPath() { return dataPath; }
11 public void setDataPath(String path) { dataPath = path; }
12
13 String ontologyPath;
14 public String getOntologyPath() { return ontologyPath; }
15 public void setOntologyPath(String path) { ontologyPath = path; }
16
17 String queryPath = null;
18 public String getQueryPath() { return queryPath; }
19 public void setQueryPath(String path) { queryPath = path; }
20
21 String answerPath = null;
22 public String getAnswerPath() { return answerPath; }
23 public void setAnswerPath(String path) { answerPath = path; }
24
25 boolean toClassify = true;
26 public boolean getToClassify() { return toClassify; }
27 public void setToClassify(boolean flag) { toClassify = flag; }
28
29 boolean toCallHermiT = true;
30 public boolean getToCallHermiT() { return toCallHermiT; }
31 public void setToCallHermiT(boolean flag) { toCallHermiT = flag; }
32
33 public static boolean ShellModeDefault = false;
34
35 boolean shellMode = ShellModeDefault;
36 public boolean getShellMode() { return shellMode; }
37 public void setShellMode(boolean flag) { shellMode = flag; }
38
39 public Properties(String path) {
40 java.util.Properties m_properties = new java.util.Properties();
41 InputStream inputStream = null;
42 try {
43 inputStream = new FileInputStream(path);
44 m_properties.load(inputStream);
45
46 setOntologyPath(m_properties.getProperty("ONTOLOGY"));
47 setDataPath(m_properties.getProperty("DATA"));
48 setQueryPath(m_properties.getProperty("QUERY"));
49 setAnswerPath(m_properties.getProperty("ANSWER"));
50 setToClassify(Boolean.parseBoolean(m_properties.getProperty("TO_CLASSIFY")));
51 setToCallHermiT(Boolean.parseBoolean(m_properties.getProperty("CALL_HERMIT")));
52
53 } catch (IOException e) {
54 e.printStackTrace();
55 } finally {
56 if (inputStream != null)
57 try {
58 inputStream.close();
59 } catch (IOException e) {
60 e.printStackTrace();
61 }
62 }
63 }
64 public Properties() { }
65
66}
diff --git a/src/uk/ac/ox/cs/pagoda/util/SparqlHelper.java b/src/uk/ac/ox/cs/pagoda/util/SparqlHelper.java
index 31838bc..1e53b9c 100644
--- a/src/uk/ac/ox/cs/pagoda/util/SparqlHelper.java
+++ b/src/uk/ac/ox/cs/pagoda/util/SparqlHelper.java
@@ -1,49 +1,18 @@
1package uk.ac.ox.cs.pagoda.util; 1package uk.ac.ox.cs.pagoda.util;
2 2
3import java.util.Collection;
4import java.util.HashSet;
5import java.util.Set;
6
7import org.semanticweb.HermiT.model.AnnotatedEquality;
8import org.semanticweb.HermiT.model.AtLeastConcept;
9import org.semanticweb.HermiT.model.Atom;
10import org.semanticweb.HermiT.model.AtomicConcept;
11import org.semanticweb.HermiT.model.AtomicDataRange;
12import org.semanticweb.HermiT.model.AtomicRole;
13import org.semanticweb.HermiT.model.Constant;
14import org.semanticweb.HermiT.model.DLPredicate;
15import org.semanticweb.HermiT.model.Equality;
16import org.semanticweb.HermiT.model.Individual;
17import org.semanticweb.HermiT.model.Inequality;
18import org.semanticweb.HermiT.model.Term;
19import org.semanticweb.HermiT.model.Variable;
20
21import uk.ac.ox.cs.pagoda.MyPrefixes;
22import uk.ac.ox.cs.pagoda.hermit.RuleHelper;
23
24import com.hp.hpl.jena.graph.Node; 3import com.hp.hpl.jena.graph.Node;
25import com.hp.hpl.jena.query.Query; 4import com.hp.hpl.jena.query.Query;
26import com.hp.hpl.jena.query.QueryFactory; 5import com.hp.hpl.jena.query.QueryFactory;
27import com.hp.hpl.jena.sparql.core.TriplePath; 6import com.hp.hpl.jena.sparql.core.TriplePath;
28import com.hp.hpl.jena.sparql.core.Var; 7import com.hp.hpl.jena.sparql.core.Var;
29import com.hp.hpl.jena.sparql.syntax.Element; 8import com.hp.hpl.jena.sparql.syntax.*;
30import com.hp.hpl.jena.sparql.syntax.ElementAssign; 9import org.semanticweb.HermiT.model.*;
31import com.hp.hpl.jena.sparql.syntax.ElementBind; 10import uk.ac.ox.cs.pagoda.MyPrefixes;
32import com.hp.hpl.jena.sparql.syntax.ElementData; 11import uk.ac.ox.cs.pagoda.hermit.RuleHelper;
33import com.hp.hpl.jena.sparql.syntax.ElementDataset; 12
34import com.hp.hpl.jena.sparql.syntax.ElementExists; 13import java.util.Collection;
35import com.hp.hpl.jena.sparql.syntax.ElementFilter; 14import java.util.HashSet;
36import com.hp.hpl.jena.sparql.syntax.ElementGroup; 15import java.util.Set;
37import com.hp.hpl.jena.sparql.syntax.ElementMinus;
38import com.hp.hpl.jena.sparql.syntax.ElementNamedGraph;
39import com.hp.hpl.jena.sparql.syntax.ElementNotExists;
40import com.hp.hpl.jena.sparql.syntax.ElementOptional;
41import com.hp.hpl.jena.sparql.syntax.ElementPathBlock;
42import com.hp.hpl.jena.sparql.syntax.ElementService;
43import com.hp.hpl.jena.sparql.syntax.ElementSubQuery;
44import com.hp.hpl.jena.sparql.syntax.ElementTriplesBlock;
45import com.hp.hpl.jena.sparql.syntax.ElementUnion;
46import com.hp.hpl.jena.sparql.syntax.ElementVisitor;
47 16
48public class SparqlHelper { 17public class SparqlHelper {
49 18
@@ -52,7 +21,7 @@ public class SparqlHelper {
52 for (int i = 0; i < atoms.length; ++i) { 21 for (int i = 0; i < atoms.length; ++i) {
53 atoms[i].getVariables(undistinguishedVars); 22 atoms[i].getVariables(undistinguishedVars);
54 } 23 }
55 int xIndex = 1; 24 int xIndex = 1;
56 while (undistinguishedVars.contains(Variable.create("X" + xIndex))) ++xIndex; 25 while (undistinguishedVars.contains(Variable.create("X" + xIndex))) ++xIndex;
57 26
58 for (String var: vars) 27 for (String var: vars)
diff --git a/src/uk/ac/ox/cs/pagoda/util/Utility.java b/src/uk/ac/ox/cs/pagoda/util/Utility.java
index 0edfac2..e98cc81 100644
--- a/src/uk/ac/ox/cs/pagoda/util/Utility.java
+++ b/src/uk/ac/ox/cs/pagoda/util/Utility.java
@@ -1,86 +1,89 @@
1package uk.ac.ox.cs.pagoda.util; 1package uk.ac.ox.cs.pagoda.util;
2 2
3import java.io.BufferedReader; 3import org.apache.log4j.Level;
4import java.io.BufferedWriter;
5import java.io.File;
6import java.io.FileInputStream;
7import java.io.FileNotFoundException;
8import java.io.FileOutputStream;
9import java.io.IOException;
10import java.io.InputStreamReader;
11import java.io.OutputStreamWriter;
12import java.io.PrintStream;
13import java.text.SimpleDateFormat;
14import java.time.LocalDateTime;
15import java.time.format.DateTimeFormatter;
16import java.util.Collection;
17import java.util.Date;
18import java.util.HashSet;
19import java.util.LinkedList;
20import java.util.Scanner;
21import java.util.Set;
22import java.util.Stack;
23
24import org.apache.log4j.Logger; 4import org.apache.log4j.Logger;
25import org.semanticweb.HermiT.model.Atom; 5import org.semanticweb.HermiT.model.Atom;
26 6
7import java.io.*;
8import java.nio.file.Files;
9import java.nio.file.Path;
10import java.text.SimpleDateFormat;
11import java.util.*;
12
27public class Utility { 13public class Utility {
28 14
29 private static final Logger LOGS = Logger.getLogger(""); // null; //
30
31 public static final String JAVA_FILE_SEPARATOR = "/"; 15 public static final String JAVA_FILE_SEPARATOR = "/";
32 public static final String FILE_SEPARATOR = System.getProperty("file.separator"); 16 public static final String FILE_SEPARATOR = System.getProperty("file.separator");
33 public static final String LINE_SEPARATOR = System.getProperty("line.separator"); 17 public static final String LINE_SEPARATOR = System.getProperty("line.separator");
34 18 public static final int TEST = -1;
35 public static final String TempDirectory = (new File("tmp" + DateTimeFormatter.ISO_LOCAL_DATE_TIME.format(LocalDateTime.now()))).getAbsolutePath() + FILE_SEPARATOR;
36
37 public static final int TEST = -1;
38 public static final int FLY = 0; 19 public static final int FLY = 0;
39 public static final int UOBM = 1; 20 public static final int UOBM = 1;
40 public static final int LUBM = 2; 21 public static final int LUBM = 2;
41 public static final int AEO = 3; 22 public static final int AEO = 3;
42 public static final int WINE = 4; 23 public static final int WINE = 4;
24 private static final String TEMP_DIR_PATH = "pagoda_tmp";
25 static Stack<PrintStream> outs = new Stack<PrintStream>();
26 private static Logger LOGS;
27 private static String tempDir;
28 private static int asciiX = (int) 'X';
29 private static StringBuilder logMessage = new StringBuilder();
43 30
44 public static Set<Atom> toSet(Atom[] data) 31 static {
45 { 32 LOGS = Logger.getLogger("PAGOdA");
33 LOGS.setLevel(Level.DEBUG);
34 }
35
36 static {
37 outs.push(System.out);
38 }
39
40 public static String getGlobalTempDirAbsolutePath() {
41 if(tempDir == null) {
42 try {
43 Path path = Files.createTempDirectory(TEMP_DIR_PATH);
44 tempDir = path.toString();
45 new File(tempDir).deleteOnExit();
46 } catch(IOException e) {
47 e.printStackTrace();
48 System.exit(1);
49 }
50 }
51 return tempDir;
52 }
53
54 public static Set<Atom> toSet(Atom[] data) {
46 HashSet<Atom> ret = new HashSet<Atom>(); 55 HashSet<Atom> ret = new HashSet<Atom>();
47 for (Atom element: data) 56 for(Atom element : data)
48 ret.add(element); 57 ret.add(element);
49 return ret; 58 return ret;
50 } 59 }
51 60
52 static Stack<PrintStream> outs = new Stack<PrintStream>();
53
54 static {
55 outs.push(System.out);
56 }
57
58 public static boolean redirectSystemOut() 61 public static boolean redirectSystemOut()
59 { 62 {
60 String stamp = new SimpleDateFormat( "HH:mm:ss").format(new Date()); 63 String stamp = new SimpleDateFormat( "HH:mm:ss").format(new Date());
61 return redirectCurrentOut("./console" + stamp + ".txt"); 64 return redirectCurrentOut("./console" + stamp + ".txt");
62 } 65 }
63 66
64 public static boolean redirectCurrentOut(String fileName) 67 public static boolean redirectCurrentOut(String fileName)
65 { 68 {
66 File file = new File(fileName); 69 File file = new File(fileName);
67 PrintStream out; 70 PrintStream out;
68 try { 71 try {
69 out = new PrintStream(new FileOutputStream(file)); 72 out = new PrintStream(new FileOutputStream(file));
70 } catch (FileNotFoundException e) { 73 } catch (FileNotFoundException e) {
71 e.printStackTrace(); 74 e.printStackTrace();
72 return false; 75 return false;
73 } 76 }
74 outs.push(out); 77 outs.push(out);
75 System.setOut(out); 78 System.setOut(out);
76 return true; 79 return true;
77 } 80 }
78 81
79 public static void closeCurrentOut() { 82 public static void closeCurrentOut() {
80 if (!outs.isEmpty()) 83 if (!outs.isEmpty())
81 outs.pop().close(); 84 outs.pop().close();
82 85
83 if (!outs.isEmpty()) 86 if(!outs.isEmpty())
84 System.setOut(outs.peek()); 87 System.setOut(outs.peek());
85 } 88 }
86 89
@@ -98,7 +101,7 @@ public class Utility {
98 while ((line = reader.readLine()) != null && !line.startsWith("}")) 101 while ((line = reader.readLine()) != null && !line.startsWith("}"))
99 if (first) { 102 if (first) {
100 first = false; 103 first = false;
101 query = expression(line.trim()); 104 query = expression(line.trim());
102 } 105 }
103 else query += ", " + expression(line.trim()); 106 else query += ", " + expression(line.trim());
104 writer.write(query); 107 writer.write(query);
@@ -117,8 +120,6 @@ public class Utility {
117 else return parts[1] + "(?" + variableIndex(parts[0]) + ",?" + variableIndex(parts[2]) + ")"; 120 else return parts[1] + "(?" + variableIndex(parts[0]) + ",?" + variableIndex(parts[2]) + ")";
118 } 121 }
119 122
120 private static int asciiX = (int)'X';
121
122 private static int variableIndex(String exp) { 123 private static int variableIndex(String exp) {
123 char var = exp.charAt(1); 124 char var = exp.charAt(1);
124 return (int)var - asciiX; 125 return (int)var - asciiX;
@@ -130,12 +131,12 @@ public class Utility {
130 return null; 131 return null;
131 return line.trim(); 132 return line.trim();
132 } 133 }
133 134
134 public static String getTextfromFile(String fileName) throws FileNotFoundException { 135 public static String getTextfromFile(String fileName) throws FileNotFoundException {
135 Scanner scanner = new Scanner(new File(fileName)); 136 Scanner scanner = new Scanner(new File(fileName));
136 String program = scanner.useDelimiter("\\Z").next(); 137 String program = scanner.useDelimiter("\\Z").next();
137 scanner.close(); 138 scanner.close();
138 return program; 139 return program;
139 } 140 }
140 141
141 public static String[] getPattern(BufferedReader answerReader) throws IOException { 142 public static String[] getPattern(BufferedReader answerReader) throws IOException {
@@ -147,59 +148,57 @@ public class Utility {
147 148
148 public static void removeRecursively(File file) { 149 public static void removeRecursively(File file) {
149 if (!file.exists()) return; 150 if (!file.exists()) return;
150 151
151 if (file.isDirectory()) 152 if (file.isDirectory())
152 for (File tFile: file.listFiles()) 153 for (File tFile: file.listFiles())
153 removeRecursively(tFile); 154 removeRecursively(tFile);
154 file.delete(); 155 file.delete();
155 } 156 }
156 157
157 public static void removeRecursively(String fileName) { 158 public static void removeRecursively(String fileName) {
158 removeRecursively(new File(fileName)); 159 removeRecursively(new File(fileName));
159 } 160 }
160 161
161 public static Collection<String> getQueryTexts(String fileName) throws IOException { 162 public static Collection<String> getQueryTexts(String fileName) throws IOException {
162 BufferedReader queryReader = new BufferedReader(new InputStreamReader(new FileInputStream(fileName))); 163 BufferedReader queryReader = new BufferedReader(new InputStreamReader(new FileInputStream(fileName)));
163 String line; 164 String line;
164 Collection<String> queryTexts = new LinkedList<String>(); 165 Collection<String> queryTexts = new LinkedList<String>();
165 while (true) { 166 while (true) {
166 while ((line = queryReader.readLine()) != null && ((line = line.trim()).isEmpty() || line.startsWith("#"))); 167 while((line = queryReader.readLine()) != null && ((line = line.trim()).isEmpty() || line.startsWith("#"))) ;
167 if (line == null) { 168 if (line == null) {
168 queryReader.close(); 169 queryReader.close();
169 return queryTexts; 170 return queryTexts;
170 } 171 }
171 172
172 StringBuffer query = new StringBuffer(); 173 StringBuffer query = new StringBuffer();
173 if (!line.startsWith("^[")) 174 if (!line.startsWith("^["))
174 query.append(line).append(LINE_SEPARATOR); 175 query.append(line).append(LINE_SEPARATOR);
175 176
176 while ((line = queryReader.readLine()) != null && !line.trim().endsWith("}")) 177 while((line = queryReader.readLine()) != null && !line.trim().endsWith("}"))
177 query.append(line).append(LINE_SEPARATOR); 178 query.append(line).append(LINE_SEPARATOR);
178 query.append(line); 179 query.append(line);
179 queryTexts.add(query.toString()); 180 queryTexts.add(query.toString());
180 } 181 }
181 } 182 }
182 183
183 /** 184 /**
184 * 185 *
185 * @param answerReader 186 * @param answerReader
186 * @return all lines before the next empty line 187 * @return all lines before the next empty line
187 * @throws IOException 188 * @throws IOException
188 */ 189 */
189 public static Collection<String> getLines(BufferedReader answerReader) throws IOException { 190 public static Collection<String> getLines(BufferedReader answerReader) throws IOException {
190 Collection<String> answerTuples = new LinkedList<String>(); 191 Collection<String> answerTuples = new LinkedList<String>();
191 String line; 192 String line;
192 while ((line = answerReader.readLine()) != null) { 193 while ((line = answerReader.readLine()) != null) {
193 line = line.trim(); 194 line = line.trim();
194 if (line.isEmpty()) 195 if (line.isEmpty())
195 break; 196 break;
196 answerTuples.add(line); 197 answerTuples.add(line);
197 } 198 }
198 return answerTuples; 199 return answerTuples;
199 } 200 }
200 201
201 private static StringBuilder logMessage = new StringBuilder();
202
203 private static String getLogMessage(Object[] messages) { 202 private static String getLogMessage(Object[] messages) {
204 if (messages.length == 1) return messages[0].toString(); 203 if (messages.length == 1) return messages[0].toString();
205 else { 204 else {
@@ -213,7 +212,11 @@ public class Utility {
213 } 212 }
214 213
215 } 214 }
216 215
216 public static void setLogLevel(Level level) {
217 LOGS.setLevel(level);
218 }
219
217 public static void logInfo(Object... messages) { 220 public static void logInfo(Object... messages) {
218 if (LOGS != null) 221 if (LOGS != null)
219 LOGS.info(getLogMessage(messages)); 222 LOGS.info(getLogMessage(messages));
@@ -233,20 +236,6 @@ public class Utility {
233 if (LOGS != null) 236 if (LOGS != null)
234 LOGS.error(getLogMessage(messages)); 237 LOGS.error(getLogMessage(messages));
235 } 238 }
236
237 public static void initialise() {
238 File tmp = new File(TempDirectory);
239 if (!tmp.exists()) tmp.mkdirs();
240 }
241
242 public static void cleanup() {
243 File tmp = new File(TempDirectory);
244 if (tmp.exists()) {
245 for (File file: tmp.listFiles())
246 file.delete();
247 tmp.delete();
248 }
249 }
250 239
251 public static String toFileIRI(String path) { 240 public static String toFileIRI(String path) {
252 String iri; 241 String iri;
diff --git a/src/uk/ac/ox/cs/pagoda/util/tuples/Tuple.java b/src/uk/ac/ox/cs/pagoda/util/tuples/Tuple.java
new file mode 100644
index 0000000..3e72748
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/util/tuples/Tuple.java
@@ -0,0 +1,44 @@
1package uk.ac.ox.cs.pagoda.util.tuples;
2
3import java.util.ArrayList;
4import java.util.Iterator;
5import java.util.Spliterator;
6import java.util.function.Consumer;
7
8public class Tuple<T> implements Iterable<T> {
9
10 final ArrayList<T> elements = new ArrayList<>();
11
12 Tuple() { }
13
14 public Tuple(T... elements) {
15 for(T t: elements) {
16 this.elements.add(t);
17 }
18 }
19
20 public Tuple(Iterable<T> iterable) {
21 for (T t : iterable) {
22 this.elements.add(t);
23 }
24 }
25
26 public T get(int i) {
27 return elements.get(i);
28 }
29
30 @Override
31 public Iterator<T> iterator() {
32 return elements.iterator();
33 }
34
35 @Override
36 public void forEach(Consumer<? super T> action) {
37 elements.forEach(action);
38 }
39
40 @Override
41 public Spliterator<T> spliterator() {
42 return elements.spliterator();
43 }
44}
diff --git a/src/uk/ac/ox/cs/pagoda/util/tuples/TupleBuilder.java b/src/uk/ac/ox/cs/pagoda/util/tuples/TupleBuilder.java
new file mode 100644
index 0000000..ee2b74d
--- /dev/null
+++ b/src/uk/ac/ox/cs/pagoda/util/tuples/TupleBuilder.java
@@ -0,0 +1,25 @@
1package uk.ac.ox.cs.pagoda.util.tuples;
2
3/**
4 * Allows to create an immutable <tt>Tuple</tt> in a non-atomic way.
5 * It can create only one <tt>Tuple</tt>.
6 * */
7public class TupleBuilder<T> {
8
9 private Tuple tuple = new Tuple();
10
11 private boolean building = true;
12
13 public boolean append(T t) {
14 if(building) tuple.elements.add(t);
15 return building;
16 }
17
18 public Tuple<T> build() {
19 if(building) {
20 building = false;
21 return tuple;
22 }
23 return null;
24 }
25}