| author | RncLsn <rnc.lsn@gmail.com> | 2015-06-02 15:40:29 +0100 |
|---|---|---|
| committer | RncLsn <rnc.lsn@gmail.com> | 2015-06-02 15:40:29 +0100 |
| commit | 691964863246bbf6ef9f72cc5e82c83df34f135a (patch) | |
| tree | 98c9834f788ff320cabcfb29733890c5b40e024f /src/uk/ac/ox/cs | |
| parent | 4b7253559c290b6fdd1c4122830f153fda85dd62 (diff) | |
Working query-dependent semi-skolemised upper bound (tested on UOBM1 and LUBM1).
Diffstat (limited to 'src/uk/ac/ox/cs')
10 files changed, 254 insertions, 232 deletions
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java index b708bee..e1be6d2 100644 --- a/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/multistage/MultiStageQueryEngine.java | |||
| @@ -82,7 +82,6 @@ public class MultiStageQueryEngine extends StageQueryEngine { | |||
| 82 | if(gap != null) | 82 | if(gap != null) |
| 83 | treatment.addAdditionalGapTuples(); | 83 | treatment.addAdditionalGapTuples(); |
| 84 | String programName = "multi-stage upper program"; | 84 | String programName = "multi-stage upper program"; |
| 85 | Utility.logInfo(name + " store is materialising " + programName + " ..."); | ||
| 86 | Timer t = new Timer(); | 85 | Timer t = new Timer(); |
| 87 | 86 | ||
| 88 | String datalogProgram = program.getDatalogRuleText(); | 87 | String datalogProgram = program.getDatalogRuleText(); |
| @@ -101,7 +100,8 @@ public class MultiStageQueryEngine extends StageQueryEngine { | |||
| 101 | long oldTripleCount = store.getTriplesCount(); | 100 | long oldTripleCount = store.getTriplesCount(); |
| 102 | 101 | ||
| 103 | subTimer.reset(); | 102 | subTimer.reset(); |
| 104 | Utility.logInfo("Iteration " + ++iteration + ": "); | 103 | Utility.logInfo(name + " store is materialising " + |
| 104 | programName + "... (iteration " + ++iteration + ")"); | ||
| 105 | 105 | ||
| 106 | incrementally = (iteration != 1); | 106 | incrementally = (iteration != 1); |
| 107 | 107 | ||
| @@ -143,8 +143,8 @@ public class MultiStageQueryEngine extends StageQueryEngine { | |||
| 143 | subTimer.reset(); | 143 | subTimer.reset(); |
| 144 | if((violations = program.isIntegrated(this, incrementally)) == null || violations.size() == 0) { | 144 | if((violations = program.isIntegrated(this, incrementally)) == null || violations.size() == 0) { |
| 145 | store.clearRulesAndMakeFactsExplicit(); | 145 | store.clearRulesAndMakeFactsExplicit(); |
| 146 | Utility.logInfo(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - tripleCountBeforeMat) + " new)"); | 146 | Utility.logDebug(name + " store after materialising " + programName + ": " + tripleCount + " (" + (tripleCount - tripleCountBeforeMat) + " new)"); |
| 147 | Utility.logInfo(name + " store is DONE for multi-stage materialising in " + t.duration() + " seconds."); | 147 | Utility.logDebug(name + " store is DONE for multi-stage materialising in " + t.duration() + " seconds."); |
| 148 | return isValid() ? 1 : 0; | 148 | return isValid() ? 1 : 0; |
| 149 | } | 149 | } |
| 150 | Utility.logDebug("Time to detect violations: " + subTimer.duration()); | 150 | Utility.logDebug("Time to detect violations: " + subTimer.duration()); |
diff --git a/src/uk/ac/ox/cs/pagoda/multistage/StageQueryEngine.java b/src/uk/ac/ox/cs/pagoda/multistage/StageQueryEngine.java index c8776fe..ffca55a 100644 --- a/src/uk/ac/ox/cs/pagoda/multistage/StageQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/multistage/StageQueryEngine.java | |||
| @@ -40,7 +40,7 @@ public abstract class StageQueryEngine extends BasicQueryEngine { | |||
| 40 | } | 40 | } |
| 41 | 41 | ||
| 42 | if (validMaterialisation) | 42 | if (validMaterialisation) |
| 43 | Utility.logInfo("The " + name + " store is valid."); | 43 | Utility.logDebug("The " + name + " store is valid."); |
| 44 | else | 44 | else |
| 45 | Utility.logInfo("The " + name + " store is not valid."); | 45 | Utility.logInfo("The " + name + " store is not valid."); |
| 46 | return validMaterialisation; | 46 | return validMaterialisation; |
diff --git a/src/uk/ac/ox/cs/pagoda/query/AnswerTuple.java b/src/uk/ac/ox/cs/pagoda/query/AnswerTuple.java index 78aced1..9a9d0de 100644 --- a/src/uk/ac/ox/cs/pagoda/query/AnswerTuple.java +++ b/src/uk/ac/ox/cs/pagoda/query/AnswerTuple.java | |||
| @@ -16,14 +16,10 @@ import java.lang.reflect.Type; | |||
| 16 | import java.util.HashMap; | 16 | import java.util.HashMap; |
| 17 | import java.util.Map; | 17 | import java.util.Map; |
| 18 | import java.util.StringTokenizer; | 18 | import java.util.StringTokenizer; |
| 19 | import java.util.regex.Matcher; | ||
| 20 | import java.util.regex.Pattern; | ||
| 21 | 19 | ||
| 22 | public class AnswerTuple { | 20 | public class AnswerTuple { |
| 23 | 21 | ||
| 24 | public static final String SEPARATOR = "\t"; | 22 | public static final String SEPARATOR = "\t"; |
| 25 | static final Pattern owlLiteralRegex = | ||
| 26 | Pattern.compile("^\"(?<lexicalForm>[^@]+(@(?<langTag>.+))?)\"(^^<(?<dataType>.+)>)?$"); | ||
| 27 | String m_str = null; | 23 | String m_str = null; |
| 28 | GroundTerm[] m_tuple; | 24 | GroundTerm[] m_tuple; |
| 29 | 25 | ||
| @@ -158,32 +154,36 @@ public class AnswerTuple { | |||
| 158 | String tuplesString = json.getAsJsonPrimitive().getAsString(); | 154 | String tuplesString = json.getAsJsonPrimitive().getAsString(); |
| 159 | // StringTokenizer tokenizer = new StringTokenizer(tuplesString, SEPARATOR); | 155 | // StringTokenizer tokenizer = new StringTokenizer(tuplesString, SEPARATOR); |
| 160 | StringTokenizer tokenizer = new StringTokenizer(tuplesString); | 156 | StringTokenizer tokenizer = new StringTokenizer(tuplesString); |
| 161 | GroundTerm[] terms = new GroundTerm[tokenizer.countTokens()]; | 157 | int tokensCount = tokenizer.countTokens(); |
| 158 | GroundTerm[] terms = new GroundTerm[tokensCount]; | ||
| 162 | 159 | ||
| 163 | // TODO test parsing | 160 | // TODO test parsing |
| 164 | for (int i = 0; i < tokenizer.countTokens(); i++) { | 161 | for(int i = 0; i < tokensCount; i++) { |
| 165 | String token = tokenizer.nextToken(); | 162 | String token = tokenizer.nextToken(); |
| 166 | if (token.charAt(0) == '<') { | 163 | if (token.charAt(0) == '<') { |
| 167 | terms[i] = uk.ac.ox.cs.JRDFox.model.Individual.create(token.substring(1,token.length()-1)); | 164 | terms[i] = uk.ac.ox.cs.JRDFox.model.Individual.create(token.substring(1,token.length()-1)); |
| 168 | } | 165 | } |
| 169 | else if (token.charAt(0) == '"') { | 166 | else if (token.charAt(0) == '"') { |
| 170 | Matcher matcher = owlLiteralRegex.matcher(token); | 167 | Datatype datatype; |
| 171 | if(matcher.matches()) { | 168 | String lexicalForm; |
| 172 | String lexicalForm = matcher.group("lexicalForm"); | 169 | if(token.contains("^^")) { |
| 173 | String dataTypeIRI = matcher.group("dataType"); | 170 | String[] lexicalFormAndType = token.split("^^"); |
| 174 | Datatype dataType; | 171 | lexicalForm = lexicalFormAndType[0]; |
| 175 | if(dataTypeIRI == null || dataTypeIRI.isEmpty()) dataType = Datatype.RDF_PLAIN_LITERAL; | 172 | datatype = Datatype.value(lexicalFormAndType[1]); |
| 176 | else dataType = uk.ac.ox.cs.JRDFox.model.Datatype.value(dataTypeIRI); | ||
| 177 | terms[i] = uk.ac.ox.cs.JRDFox.model.Literal.create(lexicalForm, dataType); | ||
| 178 | } | 173 | } |
| 179 | else { | 174 | else { |
| 180 | throw new IllegalArgumentException("The given json does not represent a valid AnswerTuple"); | 175 | lexicalForm = token.substring(1, token.length() - 1); |
| 176 | // TODO check | ||
| 177 | // datatype = token.contains("@") ? Datatype.RDF_PLAIN_LITERAL : Datatype.XSD_STRING; | ||
| 178 | datatype = Datatype.XSD_STRING; | ||
| 181 | } | 179 | } |
| 180 | terms[i] = uk.ac.ox.cs.JRDFox.model.Literal.create(lexicalForm, datatype); | ||
| 182 | } | 181 | } |
| 183 | else { | 182 | else { |
| 184 | terms[i] = uk.ac.ox.cs.JRDFox.model.BlankNode.create(token); | 183 | terms[i] = uk.ac.ox.cs.JRDFox.model.BlankNode.create(token); |
| 185 | } | 184 | } |
| 186 | } | 185 | } |
| 186 | |||
| 187 | return new AnswerTuple(terms); | 187 | return new AnswerTuple(terms); |
| 188 | } | 188 | } |
| 189 | } | 189 | } |
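The rewritten literal branch above separates the lexical form from the datatype IRI by splitting the token on `^^`. A minimal standalone sketch of that parsing step follows; it is illustrative only (the `LiteralToken` class and its fields are not part of PAGOdA), and it quotes the separator because `String.split` treats its argument as a regular expression, in which `^` is an anchor rather than a literal character.

```java
import java.util.regex.Pattern;

// Illustrative sketch only: splits an RDF literal token such as
//   "42"^^<http://www.w3.org/2001/XMLSchema#integer>   or   "hello"
// into lexical form and (optional) datatype IRI. Names are hypothetical.
final class LiteralToken {
    final String lexicalForm;
    final String datatypeIri;   // null when no ^^<...> suffix is present

    private LiteralToken(String lexicalForm, String datatypeIri) {
        this.lexicalForm = lexicalForm;
        this.datatypeIri = datatypeIri;
    }

    static LiteralToken parse(String token) {
        // String.split takes a regex, so the literal separator "^^" must be quoted.
        String[] parts = token.split(Pattern.quote("^^"), 2);
        String quoted = parts[0];                                   // e.g. "42"
        String lexical = quoted.substring(1, quoted.length() - 1);  // strip surrounding quotes
        if (parts.length == 2) {
            String iriRef = parts[1];                               // e.g. <...#integer>
            return new LiteralToken(lexical, iriRef.substring(1, iriRef.length() - 1));
        }
        return new LiteralToken(lexical, null);                     // plain / xsd:string literal
    }
}
```

With the separator quoted, `"42"^^<http://www.w3.org/2001/XMLSchema#integer>` yields the lexical form `42` plus the integer datatype IRI, and a token without a `^^` suffix falls back to a plain string literal, matching the `Datatype.XSD_STRING` default chosen in the diff.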
diff --git a/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java b/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java index 742b7da..516a461 100644 --- a/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java +++ b/src/uk/ac/ox/cs/pagoda/query/QueryRecord.java | |||
| @@ -135,74 +135,16 @@ public class QueryRecord extends Disposable { | |||
| 135 | return updateUpperBoundAnswers(answerTuples, false); | 135 | return updateUpperBoundAnswers(answerTuples, false); |
| 136 | } | 136 | } |
| 137 | 137 | ||
| 138 | public boolean updateUpperBoundAnswers(AnswerTuples answerTuples, boolean toCheckAux) { | 138 | public boolean checkUpperBoundAnswers(AnswerTuples answerTuples) { |
| 139 | if(isDisposed()) throw new DisposedException(); | 139 | if(isDisposed()) throw new DisposedException(); |
| 140 | 140 | ||
| 141 | if(!(answerTuples instanceof RDFoxAnswerTuples)) { | 141 | return updateUpperBoundAnswers(answerTuples, true, false); |
| 142 | String msg = "The upper bound must be computed by RDFox!"; | 142 | } |
| 143 | Utility.logError(msg); | ||
| 144 | throw new IllegalArgumentException(msg); | ||
| 145 | } | ||
| 146 | |||
| 147 | RDFoxAnswerTuples rdfoxAnswerTuples = (RDFoxAnswerTuples) answerTuples; | ||
| 148 | |||
| 149 | Set<AnswerTuple> candidateGapAnswerTuples = new HashSet<AnswerTuple>(); | ||
| 150 | AnswerTuple tuple; | ||
| 151 | for(; rdfoxAnswerTuples.isValid(); rdfoxAnswerTuples.moveNext()) { | ||
| 152 | tuple = rdfoxAnswerTuples.getTuple(); | ||
| 153 | if(isBottom() || !tuple.hasAnonymousIndividual()) | ||
| 154 | if((!toCheckAux || !tuple.hasAuxPredicate()) && !soundAnswerTuples.contains(tuple)) | ||
| 155 | candidateGapAnswerTuples.add(tuple); | ||
| 156 | } | ||
| 157 | |||
| 158 | /*** START: debugging ***/ | ||
| 159 | if(PagodaProperties.isDebuggingMode()) { | ||
| 160 | if(rdfoxAnswerTuples.getArity() != getAnswerVariables().length) | ||
| 161 | throw new IllegalArgumentException( | ||
| 162 | "The arity of answers (" + rdfoxAnswerTuples.getArity() + ") " + | ||
| 163 | "is different from the number of answer variables (" + | ||
| 164 | getAnswerVariables().length + ")"); | ||
| 165 | |||
| 166 | Set<AnswerTuple> namedAnswerTuples = new HashSet<>(); | ||
| 167 | rdfoxAnswerTuples.reset(); | ||
| 168 | for(; rdfoxAnswerTuples.isValid(); rdfoxAnswerTuples.moveNext()) { | ||
| 169 | tuple = rdfoxAnswerTuples.getTuple(); | ||
| 170 | // if(isBottom() || !tuple.hasAnonymousIndividual()) { | ||
| 171 | namedAnswerTuples.add(tuple); | ||
| 172 | // } | ||
| 173 | } | ||
| 174 | HashSet<AnswerTuple> difference = new HashSet<>(soundAnswerTuples); | ||
| 175 | difference.removeAll(namedAnswerTuples); | ||
| 176 | if(!difference.isEmpty()) | ||
| 177 | throw new IllegalArgumentException("The upper bound does not contain the lower bound! Missing answers: " + difference | ||
| 178 | .size()); | ||
| 179 | } | ||
| 180 | /*** END: debugging ***/ | ||
| 181 | |||
| 182 | boolean update; | ||
| 183 | if(gapAnswerTuples == null) { | ||
| 184 | gapAnswerTuples = candidateGapAnswerTuples; | ||
| 185 | update = true; | ||
| 186 | } | ||
| 187 | else { | ||
| 188 | update = gapAnswerTuples.retainAll(candidateGapAnswerTuples); | ||
| 189 | } | ||
| 190 | |||
| 191 | if(update) | ||
| 192 | Utility.logInfo("Upper bound answers updated: " + getNumberOfAnswers()); | ||
| 193 | else | ||
| 194 | Utility.logInfo("Upper bound answers unchanged"); | ||
| 195 | 143 | ||
| 196 | return update; | 144 | public boolean updateUpperBoundAnswers(AnswerTuples answerTuples, boolean toCheckAux) { |
| 145 | if(isDisposed()) throw new DisposedException(); | ||
| 197 | 146 | ||
| 198 | // boolean update = false; | 147 | return updateUpperBoundAnswers(answerTuples, toCheckAux, true); |
| 199 | // for(Iterator<AnswerTuple> iter = gapAnswerTuples.iterator(); iter.hasNext(); ) { | ||
| 200 | // tuple = iter.next(); | ||
| 201 | // if(!candidateGapAnswerTuples.contains(tuple)) { | ||
| 202 | // iter.remove(); | ||
| 203 | // update = true; | ||
| 204 | // } | ||
| 205 | // } | ||
| 206 | } | 148 | } |
| 207 | 149 | ||
| 208 | public int getNumberOfAnswers() { | 150 | public int getNumberOfAnswers() { |
| @@ -713,6 +655,77 @@ public class QueryRecord extends Disposable { | |||
| 713 | return Objects.hash(queryText, soundAnswerTuples); | 655 | return Objects.hash(queryText, soundAnswerTuples); |
| 714 | } | 656 | } |
| 715 | 657 | ||
| 658 | private boolean updateUpperBoundAnswers(AnswerTuples answerTuples, boolean toCheckAux, boolean _check_containment) { | ||
| 659 | if(!(answerTuples instanceof RDFoxAnswerTuples)) { | ||
| 660 | String msg = "The upper bound must be computed by RDFox!"; | ||
| 661 | Utility.logError(msg); | ||
| 662 | throw new IllegalArgumentException(msg); | ||
| 663 | } | ||
| 664 | |||
| 665 | RDFoxAnswerTuples rdfoxAnswerTuples = (RDFoxAnswerTuples) answerTuples; | ||
| 666 | |||
| 667 | Set<AnswerTuple> candidateGapAnswerTuples = new HashSet<AnswerTuple>(); | ||
| 668 | AnswerTuple tuple; | ||
| 669 | for(; rdfoxAnswerTuples.isValid(); rdfoxAnswerTuples.moveNext()) { | ||
| 670 | tuple = rdfoxAnswerTuples.getTuple(); | ||
| 671 | if(isBottom() || !tuple.hasAnonymousIndividual()) | ||
| 672 | if((!toCheckAux || !tuple.hasAuxPredicate()) && !soundAnswerTuples.contains(tuple)) | ||
| 673 | candidateGapAnswerTuples.add(tuple); | ||
| 674 | } | ||
| 675 | |||
| 676 | /*** START: debugging ***/ | ||
| 677 | if(PagodaProperties.isDebuggingMode() && _check_containment) { | ||
| 678 | if(rdfoxAnswerTuples.getArity() != getAnswerVariables().length) | ||
| 679 | throw new IllegalArgumentException( | ||
| 680 | "The arity of answers (" + rdfoxAnswerTuples.getArity() + ") " + | ||
| 681 | "is different from the number of answer variables (" + | ||
| 682 | getAnswerVariables().length + ")"); | ||
| 683 | |||
| 684 | Set<AnswerTuple> namedAnswerTuples = new HashSet<>(); | ||
| 685 | rdfoxAnswerTuples.reset(); | ||
| 686 | int numberOfAnswers = 0; | ||
| 687 | for(; rdfoxAnswerTuples.isValid(); rdfoxAnswerTuples.moveNext()) { | ||
| 688 | tuple = rdfoxAnswerTuples.getTuple(); | ||
| 689 | // if(isBottom() || !tuple.hasAnonymousIndividual()) { | ||
| 690 | namedAnswerTuples.add(tuple); | ||
| 691 | // } | ||
| 692 | numberOfAnswers++; | ||
| 693 | } | ||
| 694 | Utility.logDebug("The number of answers returned by an upper bound: " + numberOfAnswers); | ||
| 695 | HashSet<AnswerTuple> difference = new HashSet<>(soundAnswerTuples); | ||
| 696 | difference.removeAll(namedAnswerTuples); | ||
| 697 | if(!difference.isEmpty()) | ||
| 698 | throw new IllegalArgumentException("The upper bound does not contain the lower bound! Missing answers: " + difference | ||
| 699 | .size()); | ||
| 700 | } | ||
| 701 | /*** END: debugging ***/ | ||
| 702 | |||
| 703 | boolean update; | ||
| 704 | if(gapAnswerTuples == null) { | ||
| 705 | gapAnswerTuples = candidateGapAnswerTuples; | ||
| 706 | update = true; | ||
| 707 | } | ||
| 708 | else { | ||
| 709 | update = gapAnswerTuples.retainAll(candidateGapAnswerTuples); | ||
| 710 | } | ||
| 711 | |||
| 712 | if(update) | ||
| 713 | Utility.logInfo("Upper bound answers updated: " + getNumberOfAnswers()); | ||
| 714 | else | ||
| 715 | Utility.logInfo("Upper bound answers unchanged"); | ||
| 716 | |||
| 717 | return update; | ||
| 718 | |||
| 719 | // boolean update = false; | ||
| 720 | // for(Iterator<AnswerTuple> iter = gapAnswerTuples.iterator(); iter.hasNext(); ) { | ||
| 721 | // tuple = iter.next(); | ||
| 722 | // if(!candidateGapAnswerTuples.contains(tuple)) { | ||
| 723 | // iter.remove(); | ||
| 724 | // update = true; | ||
| 725 | // } | ||
| 726 | // } | ||
| 727 | } | ||
| 728 | |||
| 716 | public enum Step { | 729 | public enum Step { |
| 717 | LOWER_BOUND, | 730 | LOWER_BOUND, |
| 718 | UPPER_BOUND, | 731 | UPPER_BOUND, |
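The bulk of this hunk moves the gap-maintenance logic into a private three-argument `updateUpperBoundAnswers`, with the new `checkUpperBoundAnswers` delegating to it with the debug containment check disabled. The core of that logic is a set intersection: candidate gap answers are the upper-bound tuples not already known to be sound, and each new upper bound can only shrink the stored gap. A minimal sketch of that set logic, using plain strings instead of `AnswerTuple`s and hypothetical names:

```java
import java.util.HashSet;
import java.util.Set;

// Sketch of the gap-tightening step: the gap is the part of the upper bound
// not yet confirmed by the sound (lower-bound) answers. Not the PAGOdA API.
final class GapTracker {
    private final Set<String> soundAnswers;   // lower-bound answers, known correct
    private Set<String> gapAnswers = null;    // null until the first upper bound arrives

    GapTracker(Set<String> soundAnswers) {
        this.soundAnswers = soundAnswers;
    }

    /** Returns true iff the gap changed (first upper bound seen, or it shrank). */
    boolean updateUpperBound(Set<String> upperBoundAnswers) {
        Set<String> candidateGap = new HashSet<>(upperBoundAnswers);
        candidateGap.removeAll(soundAnswers);   // drop answers already known to be sound
        if (gapAnswers == null) {
            gapAnswers = candidateGap;
            return true;
        }
        // Each new upper bound can only remove candidates, never add them.
        return gapAnswers.retainAll(candidateGap);
    }
}
```

The `retainAll` return value is what the diff uses to decide between the "Upper bound answers updated" and "unchanged" log messages.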
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java index b4a1775..453b5ca 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/ConsistencyManager.java | |||
| @@ -128,7 +128,7 @@ public class ConsistencyManager extends Disposable { | |||
| 128 | store.applyReasoning(true); | 128 | store.applyReasoning(true); |
| 129 | tripleCount = store.getTriplesCount(); | 129 | tripleCount = store.getTriplesCount(); |
| 130 | 130 | ||
| 131 | Utility.logInfo("tracking store after materialising tracking program: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)", | 131 | Utility.logDebug("tracking store after materialising tracking program: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)", |
| 132 | "tracking store finished the materialisation of tracking program in " + t1.duration() + " seconds."); | 132 | "tracking store finished the materialisation of tracking program in " + t1.duration() + " seconds."); |
| 133 | 133 | ||
| 134 | extractAxioms(); | 134 | extractAxioms(); |
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java index 8445713..acdb8a3 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java | |||
| @@ -31,42 +31,29 @@ class MyQueryReasoner extends QueryReasoner { | |||
| 31 | OWLOntology ontology; | 31 | OWLOntology ontology; |
| 32 | DatalogProgram program; | 32 | DatalogProgram program; |
| 33 | 33 | ||
| 34 | // String additonalDataFile; | ||
| 35 | BasicQueryEngine rlLowerStore = null; | 34 | BasicQueryEngine rlLowerStore = null; |
| 36 | BasicQueryEngine lazyUpperStore = null; | 35 | BasicQueryEngine lazyUpperStore = null; |
| 37 | MultiStageQueryEngine limitedSkolemUpperStore; | 36 | // MultiStageQueryEngine limitedSkolemUpperStore; |
| 38 | OWLOntology elho_ontology; | 37 | OWLOntology elho_ontology; |
| 39 | // boolean[] namedIndividuals_lazyUpper; | ||
| 40 | KarmaQueryEngine elLowerStore = null; | 38 | KarmaQueryEngine elLowerStore = null; |
| 41 | BasicQueryEngine trackingStore = null; | 39 | BasicQueryEngine trackingStore = null; |
| 42 | // boolean[] namedIndividuals_tracking; | ||
| 43 | TrackingRuleEncoder encoder; | 40 | TrackingRuleEncoder encoder; |
| 44 | private boolean equalityTag; | 41 | private boolean equalityTag; |
| 45 | private boolean multiStageTag; | ||
| 46 | private Timer t = new Timer(); | 42 | private Timer t = new Timer(); |
| 47 | private Collection<String> predicatesWithGap = null; | 43 | private Collection<String> predicatesWithGap = null; |
| 48 | private SatisfiabilityStatus satisfiable; | 44 | private SatisfiabilityStatus satisfiable; |
| 49 | private ConsistencyManager consistency = new ConsistencyManager(this); | 45 | private ConsistencyManager consistency = new ConsistencyManager(this); |
| 50 | private boolean useUpperStores = false; | ||
| 51 | 46 | ||
| 52 | public MyQueryReasoner() { | 47 | public MyQueryReasoner() { |
| 53 | setup(true, true); | 48 | setup(true); |
| 54 | } | 49 | } |
| 55 | 50 | ||
| 56 | public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) { | 51 | public MyQueryReasoner(boolean multiStageTag, boolean considerEqualities) { |
| 57 | setup(multiStageTag, considerEqualities); | 52 | if(!multiStageTag) |
| 58 | } | 53 | throw new IllegalArgumentException( |
| 54 | "Value \"true\" for parameter \"multiStageTag\" is no longer supported"); | ||
| 59 | 55 | ||
| 60 | public void setup(boolean multiStageTag, boolean considerEqualities) { | 56 | setup(considerEqualities); |
| 61 | if(isDisposed()) throw new DisposedException(); | ||
| 62 | satisfiable = SatisfiabilityStatus.UNCHECKED; | ||
| 63 | this.multiStageTag = multiStageTag; | ||
| 64 | this.equalityTag = considerEqualities; | ||
| 65 | |||
| 66 | rlLowerStore = new BasicQueryEngine("rl-lower-bound"); | ||
| 67 | elLowerStore = new KarmaQueryEngine("elho-lower-bound"); | ||
| 68 | |||
| 69 | trackingStore = getUpperStore("tracking", false); | ||
| 70 | } | 57 | } |
| 71 | 58 | ||
| 72 | @Override | 59 | @Override |
| @@ -84,11 +71,8 @@ class MyQueryReasoner extends QueryReasoner { | |||
| 84 | // program.getUpper().save(); | 71 | // program.getUpper().save(); |
| 85 | // program.getGeneral().save(); | 72 | // program.getGeneral().save(); |
| 86 | 73 | ||
| 87 | useUpperStores = multiStageTag && !program.getGeneral().isHorn(); | 74 | if(!program.getGeneral().isHorn()) |
| 88 | if(useUpperStores) { | 75 | lazyUpperStore = new MultiStageQueryEngine("lazy-upper-bound", true); |
| 89 | lazyUpperStore = getUpperStore("lazy-upper-bound", true); | ||
| 90 | limitedSkolemUpperStore = new MultiStageQueryEngine("limited-skolem-upper-bound", true); | ||
| 91 | } | ||
| 92 | 76 | ||
| 93 | importData(program.getAdditionalDataFile()); | 77 | importData(program.getAdditionalDataFile()); |
| 94 | 78 | ||
| @@ -104,15 +88,16 @@ class MyQueryReasoner extends QueryReasoner { | |||
| 104 | @Override | 88 | @Override |
| 105 | public boolean preprocess() { | 89 | public boolean preprocess() { |
| 106 | if(isDisposed()) throw new DisposedException(); | 90 | if(isDisposed()) throw new DisposedException(); |
| 91 | |||
| 107 | t.reset(); | 92 | t.reset(); |
| 108 | Utility.logInfo("Preprocessing... checking satisfiability... "); | 93 | Utility.logInfo("Preprocessing (and checking satisfiability)..."); |
| 109 | 94 | ||
| 110 | String name = "data", datafile = importedData.toString(); | 95 | String name = "data", datafile = importedData.toString(); |
| 111 | rlLowerStore.importRDFData(name, datafile); | 96 | rlLowerStore.importRDFData(name, datafile); |
| 112 | rlLowerStore.materialise("lower program", program.getLower().toString()); | 97 | rlLowerStore.materialise("lower program", program.getLower().toString()); |
| 113 | // program.getLower().save(); | 98 | // program.getLower().save(); |
| 114 | if(!consistency.checkRLLowerBound()) return false; | 99 | if(!consistency.checkRLLowerBound()) return false; |
| 115 | Utility.logInfo("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber()); | 100 | Utility.logDebug("The number of sameAs assertions in RL lower store: " + rlLowerStore.getSameAsNumber()); |
| 116 | 101 | ||
| 117 | String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); | 102 | String originalMarkProgram = OWLHelper.getOriginalMarkProgram(ontology); |
| 118 | 103 | ||
| @@ -134,22 +119,7 @@ class MyQueryReasoner extends QueryReasoner { | |||
| 134 | } | 119 | } |
| 135 | if(consistency.checkUpper(lazyUpperStore)) { | 120 | if(consistency.checkUpper(lazyUpperStore)) { |
| 136 | satisfiable = SatisfiabilityStatus.SATISFIABLE; | 121 | satisfiable = SatisfiabilityStatus.SATISFIABLE; |
| 137 | Utility.logInfo("time for satisfiability checking: " + t.duration()); | 122 | Utility.logDebug("time for satisfiability checking: " + t.duration()); |
| 138 | } | ||
| 139 | |||
| 140 | if(limitedSkolemUpperStore != null) { | ||
| 141 | limitedSkolemUpperStore.importRDFData(name, datafile); | ||
| 142 | limitedSkolemUpperStore.materialise("saturate named individuals", originalMarkProgram); | ||
| 143 | int tag = limitedSkolemUpperStore.materialiseSkolemly(program, null); | ||
| 144 | if(tag != 1) { | ||
| 145 | limitedSkolemUpperStore.dispose(); | ||
| 146 | limitedSkolemUpperStore = null; | ||
| 147 | } | ||
| 148 | if(tag == -1) return false; | ||
| 149 | } | ||
| 150 | if(satisfiable == SatisfiabilityStatus.UNCHECKED && consistency.checkUpper(limitedSkolemUpperStore)) { | ||
| 151 | satisfiable = SatisfiabilityStatus.SATISFIABLE; | ||
| 152 | Utility.logInfo("time for satisfiability checking: " + t.duration()); | ||
| 153 | } | 123 | } |
| 154 | 124 | ||
| 155 | trackingStore.importRDFData(name, datafile); | 125 | trackingStore.importRDFData(name, datafile); |
| @@ -194,14 +164,11 @@ class MyQueryReasoner extends QueryReasoner { | |||
| 194 | @Override | 164 | @Override |
| 195 | public void evaluate(QueryRecord queryRecord) { | 165 | public void evaluate(QueryRecord queryRecord) { |
| 196 | if(isDisposed()) throw new DisposedException(); | 166 | if(isDisposed()) throw new DisposedException(); |
| 197 | if(queryBounds(queryRecord)) | 167 | |
| 168 | if(queryLowerAndUpperBounds(queryRecord)) | ||
| 198 | return; | 169 | return; |
| 199 | 170 | ||
| 200 | OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord); | 171 | OWLOntology relevantOntologySubset = extractRelevantOntologySubset(queryRecord); |
| 201 | |||
| 202 | int aBoxCount = relevantOntologySubset.getABoxAxioms(true).size(); | ||
| 203 | Utility.logInfo("Relevant ontology subset: ABox_axioms=" + aBoxCount + " TBox_axioms=" + (relevantOntologySubset | ||
| 204 | .getAxiomCount() - aBoxCount)); | ||
| 205 | // queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); | 172 | // queryRecord.saveRelevantOntology("fragment_query" + queryRecord.getQueryID() + ".owl"); |
| 206 | 173 | ||
| 207 | if(querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) | 174 | if(querySkolemisedRelevantSubset(relevantOntologySubset, queryRecord)) |
| @@ -240,16 +207,18 @@ class MyQueryReasoner extends QueryReasoner { | |||
| 240 | if(lazyUpperStore != null) lazyUpperStore.dispose(); | 207 | if(lazyUpperStore != null) lazyUpperStore.dispose(); |
| 241 | if(elLowerStore != null) elLowerStore.dispose(); | 208 | if(elLowerStore != null) elLowerStore.dispose(); |
| 242 | if(trackingStore != null) trackingStore.dispose(); | 209 | if(trackingStore != null) trackingStore.dispose(); |
| 243 | if(limitedSkolemUpperStore != null) limitedSkolemUpperStore.dispose(); | 210 | // if(limitedSkolemUpperStore != null) limitedSkolemUpperStore.dispose(); |
| 244 | |||
| 245 | } | 211 | } |
| 246 | 212 | ||
| 247 | private BasicQueryEngine getUpperStore(String name, boolean checkValidity) { | 213 | private void setup(boolean considerEqualities) { |
| 248 | if(multiStageTag) | 214 | if(isDisposed()) throw new DisposedException(); |
| 249 | return new MultiStageQueryEngine(name, checkValidity); | 215 | satisfiable = SatisfiabilityStatus.UNCHECKED; |
| 250 | // return new TwoStageQueryEngine(name, checkValidity); | 216 | this.equalityTag = considerEqualities; |
| 251 | else | 217 | |
| 252 | return new BasicQueryEngine(name); | 218 | rlLowerStore = new BasicQueryEngine("rl-lower-bound"); |
| 219 | elLowerStore = new KarmaQueryEngine("elho-lower-bound"); | ||
| 220 | |||
| 221 | trackingStore = new MultiStageQueryEngine("tracking", false); | ||
| 253 | } | 222 | } |
| 254 | 223 | ||
| 255 | protected void internal_importDataFile(String name, String datafile) { | 224 | protected void internal_importDataFile(String name, String datafile) { |
| @@ -284,10 +253,37 @@ class MyQueryReasoner extends QueryReasoner { | |||
| 284 | return false; | 253 | return false; |
| 285 | } | 254 | } |
| 286 | 255 | ||
| 256 | private boolean checkGapAnswers(BasicQueryEngine relevantStore, QueryRecord queryRecord) { | ||
| 257 | Tuple<String> extendedQueries = queryRecord.getExtendedQueryText(); | ||
| 258 | if(queryRecord.hasNonAnsDistinguishedVariables()) | ||
| 259 | checkGapAnswers(relevantStore, queryRecord, extendedQueries.get(0), queryRecord.getAnswerVariables()); | ||
| 260 | else | ||
| 261 | checkGapAnswers(relevantStore, queryRecord, queryRecord.getQueryText(), queryRecord.getAnswerVariables()); | ||
| 262 | |||
| 263 | queryRecord.addProcessingTime(Step.L_SKOLEM_UPPER_BOUND, t.duration()); | ||
| 264 | if(queryRecord.isProcessed()) { | ||
| 265 | queryRecord.setDifficulty(Step.L_SKOLEM_UPPER_BOUND); | ||
| 266 | return true; | ||
| 267 | } | ||
| 268 | return false; | ||
| 269 | } | ||
| 270 | |||
| 271 | private void checkGapAnswers(BasicQueryEngine relevantStore, QueryRecord queryRecord, String queryText, String[] answerVariables) { | ||
| 272 | AnswerTuples rlAnswer = null; | ||
| 273 | try { | ||
| 274 | Utility.logDebug(queryText); | ||
| 275 | rlAnswer = relevantStore.evaluate(queryText, answerVariables); | ||
| 276 | Utility.logDebug(t.duration()); | ||
| 277 | queryRecord.checkUpperBoundAnswers(rlAnswer); | ||
| 278 | } finally { | ||
| 279 | if(rlAnswer != null) rlAnswer.dispose(); | ||
| 280 | } | ||
| 281 | } | ||
| 282 | |||
| 287 | /** | 283 | /** |
| 288 | * Returns the part of the ontology relevant for Hermit, while computing the bound answers. | 284 | * Returns the part of the ontology relevant for Hermit, while computing the bound answers. |
| 289 | */ | 285 | */ |
| 290 | private boolean queryBounds(QueryRecord queryRecord) { | 286 | private boolean queryLowerAndUpperBounds(QueryRecord queryRecord) { |
| 291 | AnswerTuples rlAnswer = null, elAnswer = null; | 287 | AnswerTuples rlAnswer = null, elAnswer = null; |
| 292 | 288 | ||
| 293 | t.reset(); | 289 | t.reset(); |
| @@ -312,9 +308,6 @@ class MyQueryReasoner extends QueryReasoner { | |||
| 312 | Utility.logDebug("Lazy store"); | 308 | Utility.logDebug("Lazy store"); |
| 313 | if(lazyUpperStore != null && queryUpperStore(lazyUpperStore, queryRecord, extendedQueryTexts, Step.LAZY_UPPER_BOUND)) | 309 | if(lazyUpperStore != null && queryUpperStore(lazyUpperStore, queryRecord, extendedQueryTexts, Step.LAZY_UPPER_BOUND)) |
| 314 | return true; | 310 | return true; |
| 315 | // Utility.logDebug("Skolem store"); | ||
| 316 | // if(limitedSkolemUpperStore != null && queryUpperStore(limitedSkolemUpperStore, queryRecord, extendedQueryTexts, Step.L_SKOLEM_UPPER_BOUND)) | ||
| 317 | // return null; | ||
| 318 | } | 311 | } |
| 319 | 312 | ||
| 320 | t.reset(); | 313 | t.reset(); |
| @@ -338,6 +331,8 @@ class MyQueryReasoner extends QueryReasoner { | |||
| 338 | } | 331 | } |
| 339 | 332 | ||
| 340 | private OWLOntology extractRelevantOntologySubset(QueryRecord queryRecord) { | 333 | private OWLOntology extractRelevantOntologySubset(QueryRecord queryRecord) { |
| 334 | Utility.logInfo("Extracting relevant ontology-subset..."); | ||
| 335 | |||
| 341 | t.reset(); | 336 | t.reset(); |
| 342 | 337 | ||
| 343 | QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord); | 338 | QueryTracker tracker = new QueryTracker(encoder, rlLowerStore, queryRecord); |
| @@ -345,6 +340,14 @@ class MyQueryReasoner extends QueryReasoner { | |||
| 345 | 340 | ||
| 346 | queryRecord.addProcessingTime(Step.FRAGMENT, t.duration()); | 341 | queryRecord.addProcessingTime(Step.FRAGMENT, t.duration()); |
| 347 | 342 | ||
| 343 | // just statistics | ||
| 344 | int numOfABoxAxioms = relevantOntologySubset.getABoxAxioms(true).size(); | ||
| 345 | int numOfTBoxAxioms = relevantOntologySubset.getAxiomCount() - numOfABoxAxioms; | ||
| 346 | int originalNumOfABoxAxioms = ontology.getABoxAxioms(true).size(); | ||
| 347 | int originalNumOfTBoxAxioms = ontology.getAxiomCount() - originalNumOfABoxAxioms; | ||
| 348 | Utility.logInfo("Relevant ontology-subset has been extracted: |ABox|=" | ||
| 349 | + numOfABoxAxioms + ", |TBox|=" + numOfTBoxAxioms); | ||
| 350 | |||
| 348 | return relevantOntologySubset; | 351 | return relevantOntologySubset; |
| 349 | } | 352 | } |
| 350 | 353 | ||
| @@ -361,22 +364,23 @@ class MyQueryReasoner extends QueryReasoner { | |||
| 361 | } | 364 | } |
| 362 | 365 | ||
| 363 | private boolean querySkolemisedRelevantSubset(OWLOntology relevantSubset, QueryRecord queryRecord) { | 366 | private boolean querySkolemisedRelevantSubset(OWLOntology relevantSubset, QueryRecord queryRecord) { |
| 367 | Utility.logInfo("Evaluating semi-Skolemised relevant upper store..."); | ||
| 368 | |||
| 364 | DatalogProgram relevantProgram = new DatalogProgram(relevantSubset, false); // toClassify is false | 369 | DatalogProgram relevantProgram = new DatalogProgram(relevantSubset, false); // toClassify is false |
| 365 | 370 | ||
| 366 | MultiStageQueryEngine relevantStore = | 371 | MultiStageQueryEngine relevantStore = |
| 367 | new MultiStageQueryEngine("Relevant-store", true); // checkValidity is true | 372 | new MultiStageQueryEngine("Relevant-store", true); // checkValidity is true |
| 368 | // relevantStore.importRDFData("data", relevantProgram.getAdditionalDataFile()); // tried, doesn't work | 373 | |
| 369 | relevantStore.importDataFromABoxOf(relevantSubset); | 374 | relevantStore.importDataFromABoxOf(relevantSubset); |
| 370 | 375 | ||
| 371 | int materialisationResult = relevantStore.materialiseSkolemly(relevantProgram, null); | 376 | int materialisationResult = relevantStore.materialiseSkolemly(relevantProgram, null); |
| 372 | // int materialisationResult = relevantStore.materialiseRestrictedly(relevantProgram, null); // DOESN'T WORK!!! | ||
| 373 | if(materialisationResult != 1) | 377 | if(materialisationResult != 1) |
| 374 | throw new RuntimeException("Skolemised materialisation error"); // TODO check consistency | 378 | throw new RuntimeException("Skolemised materialisation error"); // TODO check consistency |
| 375 | // relevantStore.materialiseRestrictedly(relevantProgram, null); // it has been tried | ||
| 376 | 379 | ||
| 377 | return queryUpperStore(relevantStore, queryRecord, queryRecord.getExtendedQueryText(), Step.L_SKOLEM_UPPER_BOUND); | 380 | boolean isFullyProcessed = checkGapAnswers(relevantStore, queryRecord); |
| 378 | 381 | ||
| 379 | // return queryUpperStore(limitedSkolemUpperStore, queryRecord, queryRecord.getExtendedQueryText(), Step.L_SKOLEM_UPPER_BOUND); | 382 | Utility.logInfo("Semi-Skolemised relevant upper store has been evaluated"); |
| 383 | return isFullyProcessed; | ||
| 380 | } | 384 | } |
| 381 | 385 | ||
| 382 | enum SatisfiabilityStatus {SATISFIABLE, UNSATISFIABLE, UNCHECKED} | 386 | enum SatisfiabilityStatus {SATISFIABLE, UNSATISFIABLE, UNCHECKED} |
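Taken together, these `MyQueryReasoner` changes replace the global `limitedSkolemUpperStore` with a query-dependent one: once the shared lower and upper bounds fail to close the gap, a fresh `MultiStageQueryEngine` is built over the query's relevant ontology subset, materialised semi-Skolemly, and probed only for the remaining gap answers. A condensed sketch of that per-query flow, using the method names from the diff but simplifying control flow, timing, and error handling (so it would compile only inside the class):

```java
// Condensed, simplified sketch of the flow introduced by this commit;
// not a drop-in replacement for evaluate(QueryRecord).
void evaluateSketch(QueryRecord queryRecord) {
    if (queryLowerAndUpperBounds(queryRecord))
        return;                                     // bounds already coincide

    OWLOntology relevantSubset = extractRelevantOntologySubset(queryRecord);

    // Query-dependent semi-Skolemised upper bound over the relevant subset.
    DatalogProgram relevantProgram = new DatalogProgram(relevantSubset, false);
    MultiStageQueryEngine relevantStore = new MultiStageQueryEngine("Relevant-store", true);
    relevantStore.importDataFromABoxOf(relevantSubset);
    if (relevantStore.materialiseSkolemly(relevantProgram, null) != 1)
        throw new RuntimeException("Skolemised materialisation error");

    if (checkGapAnswers(relevantStore, queryRecord))
        return;                                     // gap closed: query fully answered

    // Otherwise the remaining gap answers are left for the full OWL reasoner.
}
```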
diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java index f835ba9..8b22919 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/RDFoxQueryEngine.java | |||
| @@ -74,7 +74,7 @@ public abstract class RDFoxQueryEngine extends QueryEngine { | |||
| 74 | long prevTriplesCount = store.getTriplesCount(); | 74 | long prevTriplesCount = store.getTriplesCount(); |
| 75 | store.importOntology(ontology.getOWLOntologyManager().createOntology(ontology.getABoxAxioms(true))); | 75 | store.importOntology(ontology.getOWLOntologyManager().createOntology(ontology.getABoxAxioms(true))); |
| 76 | long loadedTriples = store.getTriplesCount() - prevTriplesCount; | 76 | long loadedTriples = store.getTriplesCount() - prevTriplesCount; |
| 77 | Utility.logInfo(name + ": loaded " + loadedTriples + " triples from " + ontology.getABoxAxioms(true) | 77 | Utility.logDebug(name + ": loaded " + loadedTriples + " triples from " + ontology.getABoxAxioms(true) |
| 78 | .size() + " ABox axioms"); | 78 | .size() + " ABox axioms"); |
| 79 | } catch(JRDFStoreException | OWLOntologyCreationException e) { | 79 | } catch(JRDFStoreException | OWLOntologyCreationException e) { |
| 80 | e.printStackTrace(); | 80 | e.printStackTrace(); |
diff --git a/src/uk/ac/ox/cs/pagoda/rules/Program.java b/src/uk/ac/ox/cs/pagoda/rules/Program.java index 4e147bb..afc32d4 100644 --- a/src/uk/ac/ox/cs/pagoda/rules/Program.java +++ b/src/uk/ac/ox/cs/pagoda/rules/Program.java | |||
| @@ -106,7 +106,7 @@ protected PredicateDependency dependencyGraph; | |||
| 106 | clauses.addAll(botStrategy.process(botRelated)); | 106 | clauses.addAll(botStrategy.process(botRelated)); |
| 107 | 107 | ||
| 108 | if(this instanceof GeneralProgram) | 108 | if(this instanceof GeneralProgram) |
| 109 | Utility.logInfo("The number of rules: " + (clauses.size() - 1)); | 109 | Utility.logDebug("The number of rules: " + (clauses.size() - 1)); |
| 110 | } | 110 | } |
| 111 | 111 | ||
| 112 | @Override | 112 | @Override |
diff --git a/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java b/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java index d2d041f..27d3a53 100644 --- a/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java +++ b/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java | |||
| @@ -73,7 +73,7 @@ public class QueryTracker { | |||
| 73 | store.applyReasoning(incrementally); | 73 | store.applyReasoning(incrementally); |
| 74 | tripleCount = store.getTriplesCount(); | 74 | tripleCount = store.getTriplesCount(); |
| 75 | 75 | ||
| 76 | Utility.logInfo("tracking store after materialising tracking program: " | 76 | Utility.logDebug("tracking store after materialising tracking program: " |
| 77 | + tripleCount | 77 | + tripleCount |
| 78 | + " (" | 78 | + " (" |
| 79 | + (tripleCount - oldTripleCount) | 79 | + (tripleCount - oldTripleCount) |
| @@ -149,28 +149,47 @@ public class QueryTracker { | |||
| 149 | 149 | ||
| 150 | } | 150 | } |
| 151 | 151 | ||
| 152 | public void addRelatedAxiomsAndClauses(QueryRecord[] botQueryRecords) { | ||
| 153 | LinkedList<QueryRecord> toAddedRecords = new LinkedList<QueryRecord>(); | ||
| 154 | |||
| 155 | for(QueryRecord botQueryRecord : botQueryRecords) | ||
| 156 | if(overlappingDisjunctiveClauses(botQueryRecord) != null) | ||
| 157 | toAddedRecords.add(botQueryRecord); | ||
| 158 | |||
| 159 | for(QueryRecord botQueryRecord : toAddedRecords) { | ||
| 160 | m_manager.addAxioms(m_record.getRelevantOntology(), botQueryRecord.getRelevantOntology().getAxioms()); | ||
| 161 | for(DLClause clause : botQueryRecord.getRelevantClauses()) | ||
| 162 | m_record.addRelevantClauses(clause); | ||
| 163 | } | ||
| 164 | |||
| 165 | if(!toAddedRecords.isEmpty()) | ||
| 166 | Utility.logDebug("Part of bottom fragments is added for this query."); | ||
| 167 | else | ||
| 168 | Utility.logDebug("None of bottom fragments is added for this query."); | ||
| 169 | } | ||
| 170 | |||
| 152 | private int extractBinaryTuples(BasicQueryEngine trackingStore, OWLDataFactory factory, Set<String> binaryPredicates) { | 171 | private int extractBinaryTuples(BasicQueryEngine trackingStore, OWLDataFactory factory, Set<String> binaryPredicates) { |
| 153 | OWLOntology fragment = m_record.getRelevantOntology(); | 172 | OWLOntology fragment = m_record.getRelevantOntology(); |
| 154 | int count; | 173 | int count; |
| 155 | int aboxAxiomCounter = 0; | 174 | int aboxAxiomCounter = 0; |
| 156 | Resource sub, obj; | 175 | Resource sub, obj; |
| 157 | OWLAxiom aboxAxiom; | 176 | OWLAxiom aboxAxiom; |
| 158 | String trackingIRI; | 177 | String trackingIRI; |
| 159 | Set<Integer> trackedIDEqualities = new HashSet<Integer>(); | 178 | Set<Integer> trackedIDEqualities = new HashSet<Integer>(); |
| 160 | Set<String> trackedEntityEqualities = new HashSet<String>(); | 179 | Set<String> trackedEntityEqualities = new HashSet<String>(); |
| 161 | TupleIterator trackingAnswers, lowerAnswers; | 180 | TupleIterator trackingAnswers, lowerAnswers; |
| 162 | 181 | ||
| 163 | for (Iterator<String> iter = binaryPredicates.iterator(); iter.hasNext(); ) { | 182 | for (Iterator<String> iter = binaryPredicates.iterator(); iter.hasNext(); ) { |
| 164 | trackingIRI = iter.next(); | 183 | trackingIRI = iter.next(); |
| 165 | String propIRI = m_encoder.getOriginalPredicate(trackingIRI); | 184 | String propIRI = m_encoder.getOriginalPredicate(trackingIRI); |
| 166 | if (propIRI == null) continue; | 185 | if(propIRI == null) continue; |
| 167 | if (!propIRI.equals(Namespace.EQUALITY_QUOTED)) continue; | 186 | if (!propIRI.equals(Namespace.EQUALITY_QUOTED)) continue; |
| 168 | trackingAnswers = null; | 187 | trackingAnswers = null; |
| 169 | try { | 188 | try { |
| 170 | trackingAnswers = trackingStore.internal_evaluateAgainstIDBs(getSPARQLQuery4Binary(trackingIRI)); | 189 | trackingAnswers = trackingStore.internal_evaluateAgainstIDBs(getSPARQLQuery4Binary(trackingIRI)); |
| 171 | for (long multi = trackingAnswers.open(); multi != 0; multi = trackingAnswers.getNext()) { | 190 | for (long multi = trackingAnswers.open(); multi != 0; multi = trackingAnswers.getNext()) { |
| 172 | if (trackingAnswers.getResourceID(0) != trackingAnswers.getResourceID(1)) { | 191 | if (trackingAnswers.getResourceID(0) != trackingAnswers.getResourceID(1)) { |
| 173 | for (int i = 0; i < 2; ++i) | 192 | for(int i = 0; i < 2; ++i) |
| 174 | if (trackedIDEqualities.add(trackingAnswers.getResourceID(i))) { | 193 | if (trackedIDEqualities.add(trackingAnswers.getResourceID(i))) { |
| 175 | trackedEntityEqualities.add(trackingAnswers.getResource(i).m_lexicalForm); | 194 | trackedEntityEqualities.add(trackingAnswers.getResource(i).m_lexicalForm); |
| 176 | } | 195 | } |
| @@ -179,71 +198,74 @@ public class QueryTracker { | |||
| 179 | } catch (JRDFStoreException e) { | 198 | } catch (JRDFStoreException e) { |
| 180 | e.printStackTrace(); | 199 | e.printStackTrace(); |
| 181 | } finally { | 200 | } finally { |
| 182 | if (trackingAnswers != null) trackingAnswers.dispose(); | 201 | if(trackingAnswers != null) trackingAnswers.dispose(); |
| 183 | } | 202 | } |
| 184 | iter.remove(); | 203 | iter.remove(); |
| 185 | break; | 204 | break; |
| 186 | } | 205 | } |
| 187 | 206 | ||
| 188 | String sub_rep, obj_rep; | 207 | String sub_rep, obj_rep; |
| 189 | 208 | ||
| 190 | for (Iterator<String> iter = binaryPredicates.iterator(); iter.hasNext(); ) { | 209 | for (Iterator<String> iter = binaryPredicates.iterator(); iter.hasNext(); ) { |
| 191 | trackingIRI = iter.next(); | 210 | trackingIRI = iter.next(); |
| 192 | count = 0; | 211 | count = 0; |
| 193 | String propIRI = m_encoder.getOriginalPredicate(trackingIRI); | 212 | String propIRI = m_encoder.getOriginalPredicate(trackingIRI); |
| 194 | if (propIRI == null) continue; | 213 | if(propIRI == null) continue; |
| 195 | iter.remove(); | 214 | iter.remove(); |
| 196 | lowerAnswers = null; trackingAnswers = null; | 215 | lowerAnswers = null; |
| 216 | trackingAnswers = null; | ||
| 197 | Set<String> lower = new HashSet<String>(); | 217 | Set<String> lower = new HashSet<String>(); |
| 198 | OWLObject prop = null; | 218 | OWLObject prop = null; |
| 199 | try { | 219 | try { |
| 200 | trackingAnswers = trackingStore.internal_evaluateAgainstIDBs(getSPARQLQuery4Binary(trackingIRI)); | 220 | trackingAnswers = trackingStore.internal_evaluateAgainstIDBs(getSPARQLQuery4Binary(trackingIRI)); |
| 201 | trackingAnswers.open(); | 221 | trackingAnswers.open(); |
| 202 | if (trackingAnswers.getMultiplicity() == 0) continue; | 222 | if(trackingAnswers.getMultiplicity() == 0) continue; |
| 203 | 223 | ||
| 204 | lowerAnswers = m_dataStore.internal_evaluateNotExpanded(getSPARQLQuery4Binary(propIRI)); | 224 | lowerAnswers = m_dataStore.internal_evaluateNotExpanded(getSPARQLQuery4Binary(propIRI)); |
| 205 | lowerAnswers.open(); | 225 | lowerAnswers.open(); |
| 206 | if (lowerAnswers.getMultiplicity() == 0) continue; | 226 | if(lowerAnswers.getMultiplicity() == 0) continue; |
| 207 | 227 | ||
| 208 | StringBuilder builder = new StringBuilder(); | 228 | StringBuilder builder = new StringBuilder(); |
| 209 | for (long multi = lowerAnswers.getMultiplicity(); multi != 0; multi = lowerAnswers.getNext()) { | 229 | for (long multi = lowerAnswers.getMultiplicity(); multi != 0; multi = lowerAnswers.getNext()) { |
| 210 | sub = lowerAnswers.getResource(0); | 230 | sub = lowerAnswers.getResource(0); |
| 211 | obj = lowerAnswers.getResource(1); | 231 | obj = lowerAnswers.getResource(1); |
| 212 | builder.setLength(0); | 232 | builder.setLength(0); |
| 213 | builder.append(equalityGroups.find(sub.m_lexicalForm)).append(AnswerTuple.SEPARATOR).append(equalityGroups.find(obj.m_lexicalForm)); | 233 | builder.append(equalityGroups.find(sub.m_lexicalForm)) |
| 234 | .append(AnswerTuple.SEPARATOR) | ||
| 235 | .append(equalityGroups.find(obj.m_lexicalForm)); | ||
| 214 | lower.add(builder.toString()); | 236 | lower.add(builder.toString()); |
| 215 | } | 237 | } |
| 216 | 238 | ||
| 217 | for (long multi = trackingAnswers.getMultiplicity(); multi != 0; multi = trackingAnswers.getNext()) { | 239 | for (long multi = trackingAnswers.getMultiplicity(); multi != 0; multi = trackingAnswers.getNext()) { |
| 218 | sub = trackingAnswers.getResource(0); | 240 | sub = trackingAnswers.getResource(0); |
| 219 | obj = trackingAnswers.getResource(1); | 241 | obj = trackingAnswers.getResource(1); |
| 220 | builder.setLength(0); | 242 | builder.setLength(0); |
| 221 | sub_rep = equalityGroups.find(sub.m_lexicalForm); | 243 | sub_rep = equalityGroups.find(sub.m_lexicalForm); |
| 222 | obj_rep = equalityGroups.find(obj.m_lexicalForm); | 244 | obj_rep = equalityGroups.find(obj.m_lexicalForm); |
| 223 | if (!sub_rep.equals(sub.m_lexicalForm) || !obj_rep.equals(obj.m_lexicalForm)) continue; | 245 | if (!sub_rep.equals(sub.m_lexicalForm) || !obj_rep.equals(obj.m_lexicalForm)) continue; |
| 224 | 246 | ||
| 225 | builder.append(sub_rep).append(AnswerTuple.SEPARATOR).append(obj_rep); | 247 | builder.append(sub_rep).append(AnswerTuple.SEPARATOR).append(obj_rep); |
| 226 | if (lower.contains(builder.toString())) { | 248 | if (lower.contains(builder.toString())) { |
| 227 | OWLObject owlObj = getOWLObject(obj, factory); | 249 | OWLObject owlObj = getOWLObject(obj, factory); |
| 228 | if (owlObj instanceof OWLIndividual) { | 250 | if (owlObj instanceof OWLIndividual) { |
| 229 | if (prop == null) | 251 | if (prop == null) |
| 230 | prop = factory.getOWLObjectProperty(IRI.create(propIRI.startsWith("<") ? OWLHelper.removeAngles(propIRI) : propIRI)); | 252 | prop = factory.getOWLObjectProperty(IRI.create(propIRI.startsWith("<") ? OWLHelper.removeAngles(propIRI) : propIRI)); |
| 231 | aboxAxiom = factory.getOWLObjectPropertyAssertionAxiom( | 253 | aboxAxiom = factory.getOWLObjectPropertyAssertionAxiom( |
| 232 | (OWLObjectProperty) prop, | 254 | (OWLObjectProperty) prop, |
| 233 | factory.getOWLNamedIndividual(IRI.create(sub_rep)), | 255 | factory.getOWLNamedIndividual(IRI.create(sub_rep)), |
| 234 | factory.getOWLNamedIndividual(IRI.create(obj_rep))); | 256 | factory.getOWLNamedIndividual(IRI.create(obj_rep))); |
| 235 | } | 257 | } |
| 236 | else if (owlObj instanceof OWLLiteral) { | 258 | else if (owlObj instanceof OWLLiteral) { |
| 237 | if (prop == null) | 259 | if (prop == null) |
| 238 | prop = factory.getOWLDataProperty(IRI.create(propIRI.startsWith("<") ? OWLHelper.removeAngles(propIRI) : propIRI)); | 260 | prop = factory.getOWLDataProperty(IRI.create(propIRI.startsWith("<") ? OWLHelper.removeAngles(propIRI) : propIRI)); |
| 239 | aboxAxiom = factory.getOWLDataPropertyAssertionAxiom( | 261 | aboxAxiom = factory.getOWLDataPropertyAssertionAxiom( |
| 240 | (OWLDataProperty) prop, | 262 | (OWLDataProperty) prop, |
| 241 | factory.getOWLNamedIndividual(IRI.create(sub_rep)), | 263 | factory.getOWLNamedIndividual(IRI.create(sub_rep)), |
| 242 | (OWLLiteral) owlObj); | 264 | (OWLLiteral) owlObj); |
| 243 | } | 265 | } |
| 244 | else { | 266 | else { |
| 245 | Utility.logError("There might be an error here ... "); | 267 | Utility.logError("There might be an error here ... "); |
| 246 | continue; | 268 | continue; |
| 247 | } | 269 | } |
| 248 | if (!fragment.containsAxiom(aboxAxiom)) { | 270 | if (!fragment.containsAxiom(aboxAxiom)) { |
| 249 | m_manager.addAxiom(fragment, aboxAxiom); | 271 | m_manager.addAxiom(fragment, aboxAxiom); |
| @@ -259,30 +281,30 @@ public class QueryTracker { | |||
| 259 | if (lowerAnswers != null) lowerAnswers.dispose(); | 281 | if (lowerAnswers != null) lowerAnswers.dispose(); |
| 260 | lower.clear(); | 282 | lower.clear(); |
| 261 | } | 283 | } |
| 262 | Utility.logDebug("property: " + propIRI + " " + count); | 284 | Utility.logDebug("property: " + propIRI + " " + count); |
| 263 | } | 285 | } |
| 264 | 286 | ||
| 265 | count = 0; | 287 | count = 0; |
| 266 | String value; | 288 | String value; |
| 267 | OWLObjectProperty sameAs = factory.getOWLObjectProperty(IRI.create(Namespace.EQUALITY)); | 289 | OWLObjectProperty sameAs = factory.getOWLObjectProperty(IRI.create(Namespace.EQUALITY)); |
| 268 | for (String key: equalityGroups.keySet()) { | 290 | for (String key: equalityGroups.keySet()) { |
| 269 | if (!trackedEntityEqualities.contains(key)) continue; | 291 | if(!trackedEntityEqualities.contains(key)) continue; |
| 270 | value = equalityGroups.find(key); | 292 | value = equalityGroups.find(key); |
| 271 | m_manager.addAxiom(fragment, factory.getOWLObjectPropertyAssertionAxiom( | 293 | m_manager.addAxiom(fragment, factory.getOWLObjectPropertyAssertionAxiom( |
| 272 | sameAs, | 294 | sameAs, |
| 273 | factory.getOWLNamedIndividual(IRI.create(key)), | 295 | factory.getOWLNamedIndividual(IRI.create(key)), |
| 274 | factory.getOWLNamedIndividual(IRI.create(value)))); | 296 | factory.getOWLNamedIndividual(IRI.create(value)))); |
| 275 | ++aboxAxiomCounter; | 297 | ++aboxAxiomCounter; |
| 276 | ++count; | 298 | ++count; |
| 277 | } | 299 | } |
| 278 | Utility.logDebug("property: " + Namespace.EQUALITY_QUOTED + " " + count); | 300 | Utility.logDebug("property: " + Namespace.EQUALITY_QUOTED + " " + count); |
| 279 | 301 | ||
| 280 | trackedEntityEqualities.clear(); | 302 | trackedEntityEqualities.clear(); |
| 281 | trackedIDEqualities.clear(); | 303 | trackedIDEqualities.clear(); |
| 282 | Utility.logTrace(Namespace.EQUALITY_QUOTED + " " + count); | 304 | Utility.logTrace(Namespace.EQUALITY_QUOTED + " " + count); |
| 283 | 305 | ||
| 284 | Utility.logDebug("ABox extraction Done"); | 306 | Utility.logDebug("ABox extraction Done"); |
| 285 | return aboxAxiomCounter; | 307 | return aboxAxiomCounter; |
| 286 | } | 308 | } |
| 287 | 309 | ||
| 288 | private OWLObject getOWLObject(Resource rdfoxTerm, OWLDataFactory factory) { | 310 | private OWLObject getOWLObject(Resource rdfoxTerm, OWLDataFactory factory) { |
| @@ -298,15 +320,17 @@ public class QueryTracker { | |||
| 298 | // rdfoxTerm.m_datatype.equals(Datatype.XSD_UNSIGNED_BYTE)) | 320 | // rdfoxTerm.m_datatype.equals(Datatype.XSD_UNSIGNED_BYTE)) |
| 299 | if (rdfoxTerm.m_datatype.equals(Datatype.XSD_DATE)) | 321 | if (rdfoxTerm.m_datatype.equals(Datatype.XSD_DATE)) |
| 300 | return factory.getOWLLiteral(rdfoxTerm.m_lexicalForm, factory.getOWLDatatype(IRI.create(Namespace.XSD_STRING))); | 322 | return factory.getOWLLiteral(rdfoxTerm.m_lexicalForm, factory.getOWLDatatype(IRI.create(Namespace.XSD_STRING))); |
| 301 | 323 | ||
| 302 | else return factory.getOWLLiteral(rdfoxTerm.m_lexicalForm, factory.getOWLDatatype(IRI.create(rdfoxTerm.m_datatype.getIRI()))); | 324 | else |
| 325 | return factory.getOWLLiteral(rdfoxTerm.m_lexicalForm, factory.getOWLDatatype(IRI.create(rdfoxTerm.m_datatype | ||
| 326 | .getIRI()))); | ||
| 303 | } | 327 | } |
| 304 | 328 | ||
| 305 | private int extractUnaryTuples(BasicQueryEngine trackingStore, OWLDataFactory factory, Set<String> unaryPredicates) { | 329 | private int extractUnaryTuples(BasicQueryEngine trackingStore, OWLDataFactory factory, Set<String> unaryPredicates) { |
| 306 | OWLOntology fragment = m_record.getRelevantOntology(); | 330 | OWLOntology fragment = m_record.getRelevantOntology(); |
| 307 | int count; | 331 | int count; |
| 308 | int aboxAxiomCounter = 0; | 332 | int aboxAxiomCounter = 0; |
| 309 | String answer; | 333 | String answer; |
| 310 | OWLAxiom aboxAxiom; | 334 | OWLAxiom aboxAxiom; |
| 311 | for (String trackingIRI : unaryPredicates) { | 335 | for (String trackingIRI : unaryPredicates) { |
| 312 | count = 0; | 336 | count = 0; |
| @@ -319,12 +343,12 @@ public class QueryTracker { | |||
| 319 | try { | 343 | try { |
| 320 | answers = trackingStore.internal_evaluateAgainstIDBs(getSPARQLQuery4Unary(trackingIRI)); | 344 | answers = trackingStore.internal_evaluateAgainstIDBs(getSPARQLQuery4Unary(trackingIRI)); |
| 321 | answers.open(); | 345 | answers.open(); |
| 322 | if (answers.getMultiplicity() == 0) continue; | 346 | if(answers.getMultiplicity() == 0) continue; |
| 323 | 347 | ||
| 324 | lowerAnswers = m_dataStore.internal_evaluateNotExpanded(getSPARQLQuery4Unary(clsIRI)); | 348 | lowerAnswers = m_dataStore.internal_evaluateNotExpanded(getSPARQLQuery4Unary(clsIRI)); |
| 325 | lowerAnswers.open(); | 349 | lowerAnswers.open(); |
| 326 | if (lowerAnswers.getMultiplicity() == 0) continue; | 350 | if (lowerAnswers.getMultiplicity() == 0) continue; |
| 327 | 351 | ||
| 328 | for (long multi = lowerAnswers.getMultiplicity(); multi != 0; multi = lowerAnswers.getNext()) | 352 | for (long multi = lowerAnswers.getMultiplicity(); multi != 0; multi = lowerAnswers.getNext()) |
| 329 | lower.add(equalityGroups.find(lowerAnswers.getResource(0).m_lexicalForm)); | 353 | lower.add(equalityGroups.find(lowerAnswers.getResource(0).m_lexicalForm)); |
| 330 | 354 | ||
| @@ -386,25 +410,6 @@ public class QueryTracker { | |||
| 386 | } | 410 | } |
| 387 | } | 411 | } |
| 388 | 412 | ||
| 389 | public void addRelatedAxiomsAndClauses(QueryRecord[] botQueryRecords) { | ||
| 390 | LinkedList<QueryRecord> toAddedRecords = new LinkedList<QueryRecord>(); | ||
| 391 | |||
| 392 | for (QueryRecord botQueryRecord : botQueryRecords) | ||
| 393 | if (overlappingDisjunctiveClauses(botQueryRecord) != null) | ||
| 394 | toAddedRecords.add(botQueryRecord); | ||
| 395 | |||
| 396 | for (QueryRecord botQueryRecord : toAddedRecords) { | ||
| 397 | m_manager.addAxioms(m_record.getRelevantOntology(), botQueryRecord.getRelevantOntology().getAxioms()); | ||
| 398 | for (DLClause clause : botQueryRecord.getRelevantClauses()) | ||
| 399 | m_record.addRelevantClauses(clause); | ||
| 400 | } | ||
| 401 | |||
| 402 | if (!toAddedRecords.isEmpty()) | ||
| 403 | Utility.logDebug("Part of bottom fragments is added for this query."); | ||
| 404 | else | ||
| 405 | Utility.logDebug("None of bottom fragments is added for this query."); | ||
| 406 | } | ||
| 407 | |||
| 408 | private Set<DLClause> overlappingDisjunctiveClauses( | 413 | private Set<DLClause> overlappingDisjunctiveClauses( |
| 409 | QueryRecord botQueryRecord) { | 414 | QueryRecord botQueryRecord) { |
| 410 | if (m_tBoxAxioms == null) | 415 | if (m_tBoxAxioms == null) |
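`QueryTracker` compares tracked tuples against lower-bound tuples only after canonicalising every term through `equalityGroups.find(...)`, so individuals merged by `owl:sameAs` are counted once. Below is a minimal sketch of such a representative lookup; it assumes a simple union-find-style map, which is an illustration and not the actual `equalityGroups` implementation:

```java
import java.util.HashMap;
import java.util.Map;

// Illustrative representative map: every term is resolved to a canonical
// representative before tuples are compared. Not the PAGOdA implementation.
final class EqualityGroups {
    private final Map<String, String> parent = new HashMap<>();

    /** Record that a and b denote the same individual. */
    void merge(String a, String b) {
        parent.put(find(a), find(b));
    }

    /** Canonical representative of t (t itself if it was never merged). */
    String find(String t) {
        String p = parent.get(t);
        if (p == null || p.equals(t)) return t;
        String root = find(p);
        parent.put(t, root);   // path compression
        return root;
    }
}
```

This mirrors how the diff skips tuples whose subject or object is not already its own representative before turning them into ABox assertions.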
diff --git a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoder.java b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoder.java index d8ebc55..d05731a 100644 --- a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoder.java +++ b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoder.java | |||
| @@ -311,7 +311,7 @@ public abstract class TrackingRuleEncoder extends Disposable { | |||
| 311 | } | 311 | } |
| 312 | } | 312 | } |
| 313 | 313 | ||
| 314 | Utility.logInfo(addedData.size() + " triples are added into the store."); | 314 | Utility.logDebug(addedData.size() + " triples are added into the store."); |
| 315 | } | 315 | } |
| 316 | 316 | ||
| 317 | protected DLPredicate getGapDLPredicate(DLPredicate dlPredicate) { | 317 | protected DLPredicate getGapDLPredicate(DLPredicate dlPredicate) { |
