From de3749532d060f26c966a81c03f9a5d846c33d06 Mon Sep 17 00:00:00 2001 From: RncLsn Date: Thu, 28 May 2015 17:11:35 +0100 Subject: Merged updates from upstream. --- src/uk/ac/ox/cs/pagoda/endomorph/Clique.java | 5 +- .../endomorph/plan/OpenEndMultiThreadPlan.java | 85 ++++++------ .../ox/cs/pagoda/endomorph/plan/OpenEndPlan.java | 74 ++++++---- .../ac/ox/cs/pagoda/endomorph/plan/PlainPlan.java | 6 +- src/uk/ac/ox/cs/pagoda/query/GapByStore4ID.java | 120 ++++++++-------- src/uk/ac/ox/cs/pagoda/query/GapByStore4ID2.java | 141 +++++++++++++++++++ src/uk/ac/ox/cs/pagoda/query/GapByTriple.java | 40 ++---- src/uk/ac/ox/cs/pagoda/query/GapTupleIterator.java | 6 +- .../ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java | 5 +- .../cs/pagoda/reasoner/light/BasicQueryEngine.java | 154 ++++++++++----------- src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java | 2 +- .../tracking/TrackingRuleEncoderWithGap.java | 38 ++--- src/uk/ac/ox/cs/pagoda/util/Utility.java | 104 ++++++-------- 13 files changed, 453 insertions(+), 327 deletions(-) create mode 100644 src/uk/ac/ox/cs/pagoda/query/GapByStore4ID2.java (limited to 'src/uk/ac/ox') diff --git a/src/uk/ac/ox/cs/pagoda/endomorph/Clique.java b/src/uk/ac/ox/cs/pagoda/endomorph/Clique.java index 1c269ea..9b0d88e 100644 --- a/src/uk/ac/ox/cs/pagoda/endomorph/Clique.java +++ b/src/uk/ac/ox/cs/pagoda/endomorph/Clique.java @@ -1,11 +1,11 @@ package uk.ac.ox.cs.pagoda.endomorph; +import uk.ac.ox.cs.pagoda.summary.NodeTuple; + import java.util.Collection; import java.util.HashSet; import java.util.Set; -import uk.ac.ox.cs.pagoda.summary.NodeTuple; - public class Clique { NodeTuple representative; Set nodeTuples = null; @@ -13,6 +13,7 @@ public class Clique { public Clique(NodeTuple u) { nodeTuples = new HashSet(); representative = u; + nodeTuples.add(u); } public boolean addNodeTuple(NodeTuple nodeTuple) { diff --git a/src/uk/ac/ox/cs/pagoda/endomorph/plan/OpenEndMultiThreadPlan.java b/src/uk/ac/ox/cs/pagoda/endomorph/plan/OpenEndMultiThreadPlan.java index 8c7ce6a..862fdc8 100644 --- a/src/uk/ac/ox/cs/pagoda/endomorph/plan/OpenEndMultiThreadPlan.java +++ b/src/uk/ac/ox/cs/pagoda/endomorph/plan/OpenEndMultiThreadPlan.java @@ -1,88 +1,81 @@ package uk.ac.ox.cs.pagoda.endomorph.plan; -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedList; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.concurrent.atomic.AtomicInteger; - -import uk.ac.ox.cs.pagoda.endomorph.DependencyGraph; import uk.ac.ox.cs.pagoda.endomorph.Clique; +import uk.ac.ox.cs.pagoda.endomorph.DependencyGraph; import uk.ac.ox.cs.pagoda.query.AnswerTuple; import uk.ac.ox.cs.pagoda.reasoner.full.Checker; import uk.ac.ox.cs.pagoda.reasoner.full.HermitChecker; import uk.ac.ox.cs.pagoda.util.Utility; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.atomic.AtomicInteger; + public class OpenEndMultiThreadPlan implements CheckPlan { - Checker checker; - DependencyGraph dGraph; - + Checker checker; + DependencyGraph dGraph; + // Clique[] topo; +// AtomicInteger open, end; + ConcurrentLinkedDeque topo; + Set validated, falsified; + AtomicInteger counter = new AtomicInteger(); + public OpenEndMultiThreadPlan(Checker checker, DependencyGraph dGraph) { - this.checker = checker; + this.checker = checker; this.dGraph = dGraph; } - -// Clique[] topo; -// AtomicInteger open, end; - 
ConcurrentLinkedDeque topo; - - Set validated, falsified; - + @Override public int check() { Collection cliques = dGraph.getTopologicalOrder(); // topo = new LinkedBlockingDeque(cliques); - topo = new ConcurrentLinkedDeque(cliques); - + topo = new ConcurrentLinkedDeque(cliques); + // topo = new Clique[cliques.size()]; -// int index = 0; +// int index = 0; // for (Clique clique: cliques) topo[index++] = clique; -// open = new AtomicInteger(); -// end = new AtomicInteger(cliques.size() - 1); - -// validated = Collections.synchronizedSet(new HashSet()); +// open = new AtomicInteger(); +// end = new AtomicInteger(cliques.size() - 1); + +// validated = Collections.synchronizedSet(new HashSet()); // falsified = Collections.synchronizedSet(new HashSet()); - validated = Collections.newSetFromMap(new ConcurrentHashMap()); - falsified = Collections.newSetFromMap(new ConcurrentHashMap()); + validated = Collections.newSetFromMap(new ConcurrentHashMap()); + falsified = Collections.newSetFromMap(new ConcurrentHashMap()); - int numOfThreads = 10; - Collection threads = new LinkedList(); - for (int i = 0; i < numOfThreads; ++i) + int numOfThreads = 10; + Collection threads = new LinkedList(); + for(int i = 0; i < numOfThreads; ++i) threads.add(new Thread(new SubThread(new HermitChecker(checker), i))); - + for (Thread thread: threads) thread.start(); - + for (Thread thread: threads) { try { thread.join(); } catch (InterruptedException e) { e.printStackTrace(); - } + } } - - Utility.logDebug("HermiT was called " + counter.get() + " times."); - - int count = 0; + + Utility.logDebug("HermiT was called " + counter.get() + " times."); + + int count = 0; for (Clique c: dGraph.getTopologicalOrder()) { if (validated.contains(c)) - count += c.getNodeTuples().size() + 1; + count += c.getNodeTuples().size(); } - return count; + return count; } private void setMarkCascadely(Clique clique, Set marked, Map> edges) { - marked.add(clique); + marked.add(clique); if (edges.containsKey(clique)) for (Clique c: edges.get(clique)) - if (!marked.contains(c)) + if(!marked.contains(c)) setMarkCascadely(c, marked, edges); } - - AtomicInteger counter = new AtomicInteger(); class SubThread implements Runnable { diff --git a/src/uk/ac/ox/cs/pagoda/endomorph/plan/OpenEndPlan.java b/src/uk/ac/ox/cs/pagoda/endomorph/plan/OpenEndPlan.java index 19d567a..3294c31 100644 --- a/src/uk/ac/ox/cs/pagoda/endomorph/plan/OpenEndPlan.java +++ b/src/uk/ac/ox/cs/pagoda/endomorph/plan/OpenEndPlan.java @@ -17,37 +17,39 @@ public class OpenEndPlan implements CheckPlan { public static final int TIME_OUT_MIN = 1; Checker checker; - DependencyGraph dGraph; - QueryRecord m_record; - + DependencyGraph dGraph; + QueryRecord m_record; + int m_answerArity; + Set validated = new HashSet(); + Set falsified = new HashSet(); + Set passedAnswers = new HashSet(); public OpenEndPlan(Checker checker, DependencyGraph dGraph, QueryRecord record) { - this.checker = checker; + this.checker = checker; this.dGraph = dGraph; - m_record = record; + m_record = record; + m_answerArity = record.getAnswerVariables().length; } @Override public int check() { - Deque topo = new LinkedList(dGraph.getTopologicalOrder()); + LinkedList topo = new LinkedList(dGraph.getTopologicalOrder()); Utility.logInfo("Entrances: " + dGraph.getEntrances().size() + " Exists: " + dGraph.getExits().size()); - Set validated = new HashSet(); - Set falsified = new HashSet(); boolean flag = true; - Clique clique; - Timer t = new Timer(); - + Clique clique; + Timer t = new Timer(); - AnswerTuple answerTuple; + 
AnswerTuple answerTuple; while (!topo.isEmpty()) { if (flag) { - clique = topo.removeFirst(); + clique = topo.removeFirst(); + if(redundant(clique)) continue; if (validated.contains(clique)) continue; if (falsified.contains(clique)) { flag = false; continue; } Utility.logDebug("start checking front ... " + (answerTuple = clique.getRepresentative().getAnswerTuple())); if (checker.check(answerTuple)) { Utility.logDebug(answerTuple.toString() + " is verified."); - setMarkCascadely(clique, validated, dGraph.getOutGoingEdges()); + setMarkCascadelyValidated(clique); } else { falsified.add(clique); @@ -58,12 +60,12 @@ public class OpenEndPlan implements CheckPlan { clique = topo.removeLast(); if (falsified.contains(clique)) continue; if (validated.contains(clique)) { flag = true; continue; } - Utility.logDebug("start checking back ... " + (answerTuple = clique.getRepresentative().getAnswerTuple())); - if (!checker.check(answerTuple)) - setMarkCascadely(clique, falsified, dGraph.getInComingEdges()); + Utility.logDebug("start checking back ... " + (answerTuple = clique.getRepresentative().getAnswerTuple())); + if(!checker.check(answerTuple)) + setMarkCascadelyFasified(clique); else { Utility.logDebug(answerTuple.toString() + " is verified."); - validated.add(clique); + addProjections(clique); flag = true; } } @@ -76,9 +78,8 @@ public class OpenEndPlan implements CheckPlan { Collection validAnswers = new LinkedList(); for (Clique c: dGraph.getTopologicalOrder()) if (validated.contains(c)) { - count += c.getNodeTuples().size() + 1; - validAnswers.add(c.getRepresentative().getAnswerTuple()); - + count += c.getNodeTuples().size(); +// validAnswers.add(c.getRepresentative().getAnswerTuple()); for (NodeTuple nodeTuple: c.getNodeTuples()) { ans = nodeTuple.getAnswerTuple(); validAnswers.add(ans); @@ -91,12 +92,35 @@ public class OpenEndPlan implements CheckPlan { return count; } - private void setMarkCascadely(Clique clique, Set marked, Map> edges) { - marked.add(clique); + private boolean redundant(Clique clique) { + for(NodeTuple nodeTuple : clique.getNodeTuples()) + if(!passedAnswers.contains(AnswerTuple.getInstance(nodeTuple.getAnswerTuple(), m_answerArity))) + return false; + return true; + } + + private void addProjections(Clique clique) { + for(NodeTuple nodeTuple : clique.getNodeTuples()) + passedAnswers.add(AnswerTuple.getInstance(nodeTuple.getAnswerTuple(), m_answerArity)); + } + + private void setMarkCascadelyValidated(Clique clique) { + validated.add(clique); + addProjections(clique); + Map> edges = dGraph.getOutGoingEdges(); if (edges.containsKey(clique)) for (Clique c: edges.get(clique)) - if (!marked.contains(c)) - setMarkCascadely(c, marked, edges); + if(!validated.contains(c)) + setMarkCascadelyValidated(c); + } + + private void setMarkCascadelyFasified(Clique clique) { + falsified.add(clique); + Map> edges = dGraph.getInComingEdges(); + if(edges.containsKey(clique)) + for(Clique c : edges.get(clique)) + if(!falsified.contains(c)) + setMarkCascadelyFasified(c); } } diff --git a/src/uk/ac/ox/cs/pagoda/endomorph/plan/PlainPlan.java b/src/uk/ac/ox/cs/pagoda/endomorph/plan/PlainPlan.java index d6067d0..5e1a700 100644 --- a/src/uk/ac/ox/cs/pagoda/endomorph/plan/PlainPlan.java +++ b/src/uk/ac/ox/cs/pagoda/endomorph/plan/PlainPlan.java @@ -1,12 +1,12 @@ package uk.ac.ox.cs.pagoda.endomorph.plan; -import java.util.Set; - import uk.ac.ox.cs.pagoda.endomorph.Clique; import uk.ac.ox.cs.pagoda.reasoner.full.Checker; import uk.ac.ox.cs.pagoda.summary.NodeTuple; import uk.ac.ox.cs.pagoda.util.Utility; 
+import java.util.Set; + public class PlainPlan implements CheckPlan { Checker checker; @@ -22,7 +22,7 @@ public class PlainPlan implements CheckPlan { int count = 0; for (Clique clique: toCheck) if (checker.check(clique.getRepresentative().getAnswerTuple())) { - count += clique.getNodeTuples().size() + 1; + count += clique.getNodeTuples().size(); for (NodeTuple nodeTuple: clique.getNodeTuples()) Utility.logDebug(nodeTuple.getAnswerTuple().toString()); } diff --git a/src/uk/ac/ox/cs/pagoda/query/GapByStore4ID.java b/src/uk/ac/ox/cs/pagoda/query/GapByStore4ID.java index 1c0eb48..02db18d 100644 --- a/src/uk/ac/ox/cs/pagoda/query/GapByStore4ID.java +++ b/src/uk/ac/ox/cs/pagoda/query/GapByStore4ID.java @@ -1,103 +1,106 @@ package uk.ac.ox.cs.pagoda.query; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.Map; - +import uk.ac.ox.cs.JRDFox.JRDFStoreException; +import uk.ac.ox.cs.JRDFox.store.DataStore; +import uk.ac.ox.cs.JRDFox.store.TupleIterator; import uk.ac.ox.cs.pagoda.MyPrefixes; -//import uk.ac.ox.cs.pagoda.multistage.AnswerTupleID; import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager; +import uk.ac.ox.cs.pagoda.util.Namespace; import uk.ac.ox.cs.pagoda.util.Timer; import uk.ac.ox.cs.pagoda.util.Utility; -import uk.ac.ox.cs.JRDFox.JRDFStoreException; -import uk.ac.ox.cs.JRDFox.store.DataStore; -import uk.ac.ox.cs.JRDFox.store.TupleIterator; + +import java.util.HashMap; +import java.util.LinkedList; +import java.util.Map; + +//import uk.ac.ox.cs.pagoda.multistage.AnswerTupleID; //public class GapByStore4ID extends GapTupleIterator { public class GapByStore4ID extends GapTupleIterator { - private MyPrefixes prefixes = MyPrefixes.PAGOdAPrefixes; - private TupleIterator iterator = null; + protected MyPrefixes prefixes = MyPrefixes.PAGOdAPrefixes; + protected TupleIterator iterator = null; // AnswerTupleID tuple; - int[] tuple; - private BasicQueryEngine m_engine; - private DataStore m_store; - private RDFoxTripleManager tripleManager; +protected int[] tuple; + protected BasicQueryEngine m_engine; + protected DataStore m_store; + protected RDFoxTripleManager tripleManager; + protected long multi; + Map original2gap = new HashMap(); + LinkedList predicatesWithGap = new LinkedList(); public GapByStore4ID(BasicQueryEngine engine) { - m_engine = engine; - m_store = engine.getDataStore(); - tripleManager = new RDFoxTripleManager(m_store, false); + m_engine = engine; + m_store = engine.getDataStore(); + tripleManager = new RDFoxTripleManager(m_store, false); } - long multi; - @Override public void compile(String program) throws JRDFStoreException { - clear(); + clear(); - boolean incrementally = true; + boolean incrementally = true; Timer t = new Timer(); long oldTripleCount = m_store.getTriplesCount(); - + if (program != null) { // m_store.addRules(new String[] {program}); m_store.importRules(program); - incrementally = false; + incrementally = false; } - + m_store.applyReasoning(incrementally); - + long tripleCount = m_store.getTriplesCount(); - - Utility.logDebug("current store after materialising upper related rules: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)", + + Utility.logDebug("current store after materialising upper related rules: " + tripleCount + " (" + (tripleCount - oldTripleCount) + " new)", "current store finished the materialisation of upper related rules in " + t.duration() + " seconds."); - + m_engine.setExpandEquality(false); iterator = 
m_engine.internal_evaluateAgainstIDBs("select ?x ?y ?z where { ?x ?y ?z . }"); m_engine.setExpandEquality(true); - + multi = iterator.open(); - Utility.logDebug("gap query evaluted ..."); + Utility.logDebug("gap query evaluted ..."); } @Override public boolean hasNext() { - if (iterator == null) return false; + if(iterator == null) return false; try { // tuple = new AnswerTupleID(3); - tuple = new int[3]; - Integer predicate; + tuple = new int[3]; + Integer predicate; for (; multi != 0; multi = iterator.getNext()) { for (int i = 0; i < 3; ++i) // tuple.setTerm(i, (int) iterator.getResourceID(i)); - tuple[i] = (int) iterator.getResourceID(i); - + tuple[i] = iterator.getResourceID(i); + if (isRDF_TYPE()) { // predicate = getGapPredicateID(tuple.getTerm(2)); - predicate = getGapPredicateID(tuple[2]); - if (predicate == null) continue; + predicate = getGapPredicateID(tuple[2]); + if(predicate == null) continue; // tuple.setTerm(2, predicate); - tuple[2] = predicate; + tuple[2] = predicate; } else { // predicate = getGapPredicateID(tuple.getTerm(1)); - predicate = getGapPredicateID(tuple[1]); - if (predicate == null) continue; + predicate = getGapPredicateID(tuple[1]); + if(predicate == null) continue; // tuple.setTerm(1, predicate); - tuple[1] = predicate; + tuple[1] = predicate; } - return true; + return true; } } catch (JRDFStoreException e) { e.printStackTrace(); - return false; + return false; } - return false; + return false; } - + @Override // public AnswerTupleID next() { public int[] next() { @@ -105,19 +108,16 @@ public class GapByStore4ID extends GapTupleIterator { multi = iterator.getNext(); } catch (JRDFStoreException e) { e.printStackTrace(); - } - - return tuple; + } + + return tuple; } - Map original2gap = new HashMap(); - LinkedList predicatesWithGap = new LinkedList(); - public LinkedList getPredicatesWithGap() { return predicatesWithGap; } - - private Integer getGapPredicateID(int originalID) { + + protected Integer getGapPredicateID(int originalID) { Integer gapID; if ((gapID = original2gap.get(originalID)) != null) return gapID; @@ -136,11 +136,14 @@ public class GapByStore4ID extends GapTupleIterator { return gapID; } - private boolean isAuxPredicate(String originalPredicate) { - return originalPredicate.contains("_AUX"); + protected boolean isAuxPredicate(String originalPredicate) { + if(originalPredicate.equals(Namespace.EQUALITY_QUOTED)) return false; + return originalPredicate.contains("_AUX") || + originalPredicate.startsWith("<" + Namespace.OWL_NS) || + originalPredicate.startsWith("<" + Namespace.PAGODA_ORIGINAL); } - private boolean isRDF_TYPE() { + protected boolean isRDF_TYPE() { // return tripleManager.isRdfTypeID(tuple.getTerm(1)); return tripleManager.isRdfTypeID(tuple[1]); } @@ -150,8 +153,6 @@ public class GapByStore4ID extends GapTupleIterator { Utility.logError("Unsupported operation!"); } - private boolean valid = false; - @Override public void save(String file) { Utility.logError("Unsupported Operation..."); @@ -168,7 +169,6 @@ public class GapByStore4ID extends GapTupleIterator { ++tupleCounter; tripleManager.addTripleByID(tuple); } - valid = true; long tripleCounter = m_store.getTriplesCount(); Utility.logDebug("There are " + tupleCounter + " tuples in the gap between lower and upper bound materialisation.", @@ -183,8 +183,6 @@ public class GapByStore4ID extends GapTupleIterator { } } - public boolean isValid() {return valid; } - @Override public void addTo(DataStore store) throws JRDFStoreException { Utility.logError("Unsupported Operation..."); diff 
--git a/src/uk/ac/ox/cs/pagoda/query/GapByStore4ID2.java b/src/uk/ac/ox/cs/pagoda/query/GapByStore4ID2.java new file mode 100644 index 0000000..19efc58 --- /dev/null +++ b/src/uk/ac/ox/cs/pagoda/query/GapByStore4ID2.java @@ -0,0 +1,141 @@ +package uk.ac.ox.cs.pagoda.query; + +import uk.ac.ox.cs.JRDFox.JRDFStoreException; +import uk.ac.ox.cs.JRDFox.store.TupleIterator; +import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; +import uk.ac.ox.cs.pagoda.util.UFS; + +import java.util.*; + +public class GapByStore4ID2 extends GapByStore4ID { + + private BasicQueryEngine m_baseEngine; + private UFS m_equality = null, m_baseEquality = null; + private LinkedList toAddedIndividuals = null; + private TupleIterator iter_individual = null; + private int currentID = -1; + + public GapByStore4ID2(BasicQueryEngine engine, BasicQueryEngine baseEngine) { + super(engine); + m_baseEngine = baseEngine; + } + + @Override + public boolean hasNext() { + if(getNewGapTuple(iterator, -1)) return true; + if(iterator != null) { + iterator.dispose(); + iterator = null; + } + return getNextGapFactAboutEquality(); + } + + private boolean getNewGapTuple(TupleIterator it, int firstElement) { + if(it == null) return false; + int firstIndex = 0; + tuple = new int[3]; + if(firstElement > 0) { + tuple[0] = firstElement; + firstIndex = 1; + } + Integer predicate; + try { + for(; multi != 0; multi = it.getNext()) { + for(int i = firstIndex; i < 3; ++i) + tuple[i] = it.getResourceID(i - firstIndex); + + if(isRDF_TYPE()) { + predicate = getGapPredicateID(tuple[2]); + if(predicate == null) continue; + tuple[2] = predicate; + } else { + predicate = getGapPredicateID(tuple[1]); + if(predicate == null) continue; + tuple[1] = predicate; + } + return true; + } + } catch(JRDFStoreException e) { + e.printStackTrace(); + return false; + } + return false; + } + + private boolean getNextGapFactAboutEquality() { + if(toAddedIndividuals == null) { + m_equality = m_engine.getEqualityGroups(false); + m_baseEquality = m_baseEngine.getEqualityGroups(false); + toAddedIndividuals = new LinkedList(); + Map rep2cnt = new HashMap(); + Map rep2cnt_base = new HashMap(); + count(m_engine, m_equality, rep2cnt); + count(m_baseEngine, m_baseEquality, rep2cnt_base); + Set visitedrep = new HashSet(); + for(String individual : m_equality.keySet()) { + String rep = m_equality.find(individual); + if(visitedrep.contains(rep)) continue; + visitedrep.add(rep); + String rep_base = m_baseEquality.find(individual); + if(!rep2cnt.get(rep).equals(rep2cnt_base.get(rep_base))) { + toAddedIndividuals.add(rep); + } + } + + } + while(true) { + if(getNewGapTuple(iter_individual, currentID)) return true; + if(iter_individual != null) { + iter_individual.dispose(); + iter_individual = null; + } + if(toAddedIndividuals.isEmpty()) { + currentID = -1; + return false; + } + String individual = toAddedIndividuals.remove(); + currentID = tripleManager.getResourceID(individual); + try { + iter_individual = + m_engine.internal_evaluateNotExpanded(String.format("select distinct ?y ?z where { <%s> ?y ?z }", individual)); + multi = iter_individual.open(); + } catch(JRDFStoreException e) { + e.printStackTrace(); + } + } + } + + private void count(BasicQueryEngine engine, UFS equality, Map map) { + for(String ind : equality.keySet()) { + Integer exist = map.get(ind); + if(exist == null) + map.put(equality.find(ind), 1); + else + map.put(equality.find(ind), ++exist); + } + } + + @Override + public int[] next() { + try { + if(iterator != null) + multi = iterator.getNext(); + else 
if(iter_individual != null) + multi = iter_individual.getNext(); + else + multi = 0; + } catch(JRDFStoreException e) { + e.printStackTrace(); + } + return tuple; + } + + public void clear() { + super.clear(); + if(iter_individual != null) { + iter_individual.dispose(); + iter_individual = null; + } + } + +} diff --git a/src/uk/ac/ox/cs/pagoda/query/GapByTriple.java b/src/uk/ac/ox/cs/pagoda/query/GapByTriple.java index a1c1b0e..eaa629b 100644 --- a/src/uk/ac/ox/cs/pagoda/query/GapByTriple.java +++ b/src/uk/ac/ox/cs/pagoda/query/GapByTriple.java @@ -1,36 +1,29 @@ package uk.ac.ox.cs.pagoda.query; -import java.io.BufferedWriter; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStreamWriter; -import java.util.Collection; - -import org.semanticweb.HermiT.model.Atom; -import org.semanticweb.HermiT.model.AtomicConcept; -import org.semanticweb.HermiT.model.AtomicRole; -import org.semanticweb.HermiT.model.DLClause; -import org.semanticweb.HermiT.model.Individual; - +import org.semanticweb.HermiT.model.*; +import uk.ac.ox.cs.JRDFox.JRDFStoreException; +import uk.ac.ox.cs.JRDFox.Prefixes; +import uk.ac.ox.cs.JRDFox.store.DataStore; +import uk.ac.ox.cs.JRDFox.store.Parameters; +import uk.ac.ox.cs.JRDFox.store.TupleIterator; import uk.ac.ox.cs.pagoda.MyPrefixes; import uk.ac.ox.cs.pagoda.owl.OWLHelper; import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; import uk.ac.ox.cs.pagoda.reasoner.light.RDFoxTripleManager; import uk.ac.ox.cs.pagoda.util.Namespace; import uk.ac.ox.cs.pagoda.util.Utility; -import uk.ac.ox.cs.JRDFox.JRDFStoreException; -import uk.ac.ox.cs.JRDFox.Prefixes; -import uk.ac.ox.cs.JRDFox.store.DataStore; -import uk.ac.ox.cs.JRDFox.store.Parameters; -import uk.ac.ox.cs.JRDFox.store.TupleIterator; + +import java.io.BufferedWriter; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.util.Collection; public class GapByTriple extends GapTupleIterator { - private static final String RDF_TYPE = Namespace.RDF_NS + "type"; - private static final String BRIEF_RDF_TYPE = "rdf:type"; - static final String allTripleQuery = "SELECT ?X ?Y ?Z WHERE { ?X ?Y ?Z }"; - + private static final String RDF_TYPE = Namespace.RDF_NS + "type"; + private static final String BRIEF_RDF_TYPE = "rdf:type"; DataStore lowerStore, upperStore; long multi; TupleIterator iterator; @@ -163,11 +156,6 @@ public class GapByTriple extends GapTupleIterator { addTo(upperStore); } - @Override - public boolean isValid() { - return true; - } - @Override public void clear() { iterator.dispose(); diff --git a/src/uk/ac/ox/cs/pagoda/query/GapTupleIterator.java b/src/uk/ac/ox/cs/pagoda/query/GapTupleIterator.java index 58303bb..2983b2e 100644 --- a/src/uk/ac/ox/cs/pagoda/query/GapTupleIterator.java +++ b/src/uk/ac/ox/cs/pagoda/query/GapTupleIterator.java @@ -1,10 +1,10 @@ package uk.ac.ox.cs.pagoda.query; -import java.util.Iterator; - import uk.ac.ox.cs.JRDFox.JRDFStoreException; import uk.ac.ox.cs.JRDFox.store.DataStore; +import java.util.Iterator; + public abstract class GapTupleIterator implements Iterator { public static final String gapPredicateSuffix = "_AUXg"; @@ -17,8 +17,6 @@ public abstract class GapTupleIterator implements Iterator { public void compile(String programText) throws JRDFStoreException {} - public abstract boolean isValid(); - public abstract void save(String file); public abstract void addBackTo() throws JRDFStoreException; diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java 
b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java index b5b9534..618fb70 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/MyQueryReasoner.java @@ -7,6 +7,7 @@ import uk.ac.ox.cs.pagoda.owl.EqualitiesEliminator; import uk.ac.ox.cs.pagoda.owl.OWLHelper; import uk.ac.ox.cs.pagoda.query.AnswerTuples; import uk.ac.ox.cs.pagoda.query.GapByStore4ID; +import uk.ac.ox.cs.pagoda.query.GapByStore4ID2; import uk.ac.ox.cs.pagoda.query.QueryRecord; import uk.ac.ox.cs.pagoda.query.QueryRecord.Step; import uk.ac.ox.cs.pagoda.reasoner.full.Checker; @@ -164,7 +165,9 @@ class MyQueryReasoner extends QueryReasoner { trackingStore.importRDFData(name, datafile); trackingStore.materialise("saturate named individuals", originalMarkProgram); - GapByStore4ID gap = new GapByStore4ID(trackingStore); +// materialiseFullUpper(); +// GapByStore4ID gap = new GapByStore4ID(trackingStore); + GapByStore4ID gap = new GapByStore4ID2(trackingStore, rlLowerStore); trackingStore.materialiseFoldedly(program, gap); predicatesWithGap = gap.getPredicatesWithGap(); gap.clear(); diff --git a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java index 5d2e411..9aa6235 100644 --- a/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java +++ b/src/uk/ac/ox/cs/pagoda/reasoner/light/BasicQueryEngine.java @@ -19,16 +19,18 @@ import java.util.*; public class BasicQueryEngine extends RDFoxQueryEngine { - protected DataStore store; - protected Parameters parameters = new Parameters(); + protected DataStore store; + protected Parameters parameters = new Parameters(); + Set materialisedRules = new HashSet(); + private UFS equalityGroups = null; public BasicQueryEngine(String name) { - super(name); + super(name); store = RDFoxQueryEngine.createDataStore(); - parameters.m_allAnswersInRoot = true; - parameters.m_useBushy = true; + parameters.m_allAnswersInRoot = true; + parameters.m_useBushy = true; } - + public void materialiseFoldedly(DatalogProgram dProgram, GapByStore4ID gap) { if (gap != null) { materialise("lower program", dProgram.getLower().toString()); @@ -40,10 +42,9 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } catch (JRDFStoreException e) { e.printStackTrace(); } finally { - gap.clear(); + gap.clear(); } - } - else + } else materialise("upper program", dProgram.getUpper().toString()); } @@ -58,24 +59,23 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } catch (JRDFStoreException e) { e.printStackTrace(); } finally { - gap.clear(); + gap.clear(); } - } - else + } else materialise("upper program", dProgram.getUpper().toString()); - - return 1; - } + return 1; + } + public int materialiseSkolemly(DatalogProgram dProgram, GapByStore4ID gap) { throw new UnsupportedOperationException(); } - + @Override public AnswerTuples evaluate(String queryText) { - return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0]); + return evaluate(queryText, ConjunctiveQueryHelper.getAnswerVariables(queryText)[0]); } - + @Override public AnswerTuples evaluate(String queryText, String[] answerVars) { TupleIterator tupleIterator; @@ -83,9 +83,9 @@ public class BasicQueryEngine extends RDFoxQueryEngine { tupleIterator = store.compileQuery(queryText.replace("_:", "?"), prefixes, parameters); } catch (JRDFStoreException e) { e.printStackTrace(); - return null; + return null; } - return new RDFoxAnswerTuples(answerVars, tupleIterator); + return new RDFoxAnswerTuples(answerVars, 
tupleIterator); } @Override @@ -95,17 +95,18 @@ public class BasicQueryEngine extends RDFoxQueryEngine { @Override public void dispose() { - store.dispose(); + store.dispose(); } protected void outputClassAssertions(String filename) { TupleIterator allTuples = null; - boolean redirect = false; + boolean redirect = false; try { allTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Z }", prefixes, parameters); redirect = Utility.redirectCurrentOut(filename); - for (long multi = allTuples.open(); multi != 0; multi = allTuples.getNext()) - System.out.println(RDFoxTripleManager.getQuotedTerm(allTuples.getResource(0)) + " " + RDFoxTripleManager.getQuotedTerm(allTuples.getResource(1))); + for(long multi = allTuples.open(); multi != 0; multi = allTuples.getNext()) + System.out.println(RDFoxTripleManager.getQuotedTerm(allTuples.getResource(0)) + " " + RDFoxTripleManager + .getQuotedTerm(allTuples.getResource(1))); } catch (JRDFStoreException e) { e.printStackTrace(); } finally { @@ -123,137 +124,134 @@ public class BasicQueryEngine extends RDFoxQueryEngine { public void outputInstance4BinaryPredicate(String iri) { outputAnswers("select ?x ?y where { ?x <" + iri + "> ?y . }"); } - + public void outputInstanceNumbers(String filename) { TupleIterator predicateTuples = null; - TupleIterator instanceTuples; - Set number = new HashSet(); - String predicate; + TupleIterator instanceTuples; + Set number = new HashSet(); + String predicate; try { predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X <" + Namespace.RDF_TYPE + "> ?Y }", prefixes, parameters); for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); - instanceTuples = null; + instanceTuples = null; try { instanceTuples = getDataStore().compileQuery("SELECT ?X WHERE { ?X <" + Namespace.RDF_TYPE + "> " + predicate + " }", prefixes, parameters); - long totalCount = 0; + long totalCount = 0; for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) { totalCount += instanceTuples.getMultiplicity(); } number.add(predicate + " * " + totalCount); } finally { - if (instanceTuples != null) instanceTuples.dispose(); + if(instanceTuples != null) instanceTuples.dispose(); } } } catch (JRDFStoreException e) { e.printStackTrace(); - } finally { + } finally { if (predicateTuples != null) predicateTuples.dispose(); - predicateTuples = null; + predicateTuples = null; } - + try { predicateTuples = getDataStore().compileQuery("SELECT DISTINCT ?Y WHERE { ?X ?Y ?Z }", prefixes, parameters); for (long multi = predicateTuples.open(); multi != 0; multi = predicateTuples.getNext()) { - predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); - instanceTuples = null; + predicate = RDFoxTripleManager.getQuotedTerm(predicateTuples.getResource(0)); + instanceTuples = null; try { instanceTuples = getDataStore().compileQuery("SELECT ?X ?Z WHERE { ?X " + predicate + " ?Z }", prefixes, parameters); long totalCount = 0; for (long multi1 = instanceTuples.open(); multi1 != 0; multi1 = instanceTuples.getNext()) totalCount += instanceTuples.getMultiplicity(); - number.add(predicate + " * " + totalCount); + number.add(predicate + " * " + totalCount); } finally { - if (instanceTuples != null) instanceTuples.dispose(); + if(instanceTuples != null) instanceTuples.dispose(); } } - + } catch (JRDFStoreException e) { e.printStackTrace(); } finally { if 
(predicateTuples != null) predicateTuples.dispose(); predicateTuples = null; } - + Utility.redirectCurrentOut(filename); String[] ordered = number.toArray(new String[0]); Arrays.sort(ordered, new DLPredicateComparator()); - for (String line: ordered) System.out.println(line); + for(String line : ordered) System.out.println(line); Utility.closeCurrentOut(); - + } public TupleIterator internal_evaluateAgainstIDBs(String queryText) throws JRDFStoreException { TupleIterator iter = store.compileQuery(queryText, prefixes, parameters, TripleStatus.TUPLE_STATUS_IDB.union(TripleStatus.TUPLE_STATUS_EDB), TripleStatus.TUPLE_STATUS_IDB); // iter.open(); - return iter; + return iter; } public TupleIterator internal_evaluate(String queryText) throws JRDFStoreException { TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); -// iter.open(); - return iter; +// iter.open(); + return iter; } - + public void setExpandEquality(boolean flag) { - parameters.m_expandEquality = flag; + parameters.m_expandEquality = flag; } - + public TupleIterator internal_evaluateNotExpanded(String queryText) throws JRDFStoreException { - parameters.m_expandEquality = false; - TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); -// iter.open(); - parameters.m_expandEquality = true; + parameters.m_expandEquality = false; + TupleIterator iter = store.compileQuery(queryText, prefixes, parameters); +// iter.open(); + parameters.m_expandEquality = true; return iter; } - public TupleIterator internal_evaluate(String queryText, boolean incrementally) throws JRDFStoreException { return incrementally ? internal_evaluateAgainstIDBs(queryText) : internal_evaluate(queryText); } - Set materialisedRules = new HashSet(); - public String getUnusedRules(Collection clauses, boolean toUpdate) { DLClause clause; for (Iterator iter = clauses.iterator(); iter.hasNext(); ) { if (materialisedRules.contains(clause = iter.next())) iter.remove(); - else if (toUpdate) materialisedRules.add(clause); + else if(toUpdate) materialisedRules.add(clause); } - + if (clauses.isEmpty()) return null; - + return Program.toString(clauses); } public void outputMaterialisedRules() { - System.out.println(DLClauseHelper.toString(materialisedRules)); + System.out.println(DLClauseHelper.toString(materialisedRules)); } public void outputAnswers(String query) { TupleIterator iter = null; try { iter = internal_evaluate(query); - System.out.println(query); - int arity = iter.getArity(); + System.out.println(query); + int arity = iter.getArity(); for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { for (int i = 0; i < arity; ++i) System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); - System.out.println(); + System.out.println(); } } catch (JRDFStoreException e) { e.printStackTrace(); } finally { - if (iter != null) iter.dispose(); + if(iter != null) iter.dispose(); } } public void outputInstance4UnaryPredicate(String iri) { outputAnswers("select ?x where { ?x " + " <" - + iri - + "> .}"); + + iri + + "> .}"); } public void outputSubjects(String p, String o) { @@ -265,13 +263,13 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public void outputIDBFacts() { - TupleIterator iter = null; + TupleIterator iter = null; try { iter = internal_evaluateAgainstIDBs("select distict ?x ?y ?z where { ?x ?y ?z }"); for (long multi = iter.open(); multi != 0; multi = iter.getNext()) { for (int i = 0; i < 3; ++i) - System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); - 
System.out.println(); + System.out.print(RDFoxTripleManager.getQuotedTerm(iter.getResource(i)) + "\t"); + System.out.println(); } } catch (JRDFStoreException e) { // TODO Auto-generated catch block @@ -279,7 +277,7 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } finally { if (iter != null) iter.dispose(); } - + } public void outputType4Individual(String iri) { @@ -287,25 +285,23 @@ public class BasicQueryEngine extends RDFoxQueryEngine { } public int getSameAsNumber() { - TupleIterator iter = null; - int counter = 0; + TupleIterator iter = null; + int counter = 0; try { iter = internal_evaluate("select ?x ?y where {?x " + Namespace.EQUALITY_QUOTED + " ?y . }"); - for (long multi = iter.open(); multi != 0; multi = iter.getNext()) + for(long multi = iter.open(); multi != 0; multi = iter.getNext()) if (iter.getResourceID(0) != iter.getResourceID(1)) ++counter; } catch (JRDFStoreException e) { - e.printStackTrace(); + e.printStackTrace(); } finally { if (iter != null) iter.dispose(); } - return counter; + return counter; } - - private UFS equalityGroups = null; - - public UFS getEqualityGroups() { - if (equalityGroups != null) return equalityGroups; + + public UFS getEqualityGroups(boolean reuse) { + if(reuse && equalityGroups != null) return equalityGroups; equalityGroups = new UFS(); diff --git a/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java b/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java index ee55be7..d2d041f 100644 --- a/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java +++ b/src/uk/ac/ox/cs/pagoda/tracking/QueryTracker.java @@ -42,7 +42,7 @@ public class QueryTracker { m_record = queryRecord; m_manager = m_encoder.getOntology().getOWLOntologyManager(); - equalityGroups = m_dataStore.getEqualityGroups(); + equalityGroups = m_dataStore.getEqualityGroups(true); } diff --git a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderWithGap.java b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderWithGap.java index 67d07a8..c3694ad 100644 --- a/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderWithGap.java +++ b/src/uk/ac/ox/cs/pagoda/tracking/TrackingRuleEncoderWithGap.java @@ -1,25 +1,21 @@ package uk.ac.ox.cs.pagoda.tracking; -import java.util.Collection; -import java.util.LinkedList; - -import org.semanticweb.HermiT.model.AnnotatedEquality; -import org.semanticweb.HermiT.model.Atom; -import org.semanticweb.HermiT.model.AtomicConcept; -import org.semanticweb.HermiT.model.AtomicRole; -import org.semanticweb.HermiT.model.DLClause; -import org.semanticweb.HermiT.model.Equality; -import org.semanticweb.HermiT.model.Variable; +import org.semanticweb.HermiT.model.*; +import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLOntology; - import uk.ac.ox.cs.pagoda.hermit.DLClauseHelper; -import uk.ac.ox.cs.pagoda.query.*; +import uk.ac.ox.cs.pagoda.query.GapTupleIterator; import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine; import uk.ac.ox.cs.pagoda.rules.UpperDatalogProgram; import uk.ac.ox.cs.pagoda.util.Namespace; +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Set; + public class TrackingRuleEncoderWithGap extends TrackingRuleEncoder { public TrackingRuleEncoderWithGap(UpperDatalogProgram program, BasicQueryEngine store) { @@ -34,8 +30,9 @@ public class TrackingRuleEncoderWithGap extends TrackingRuleEncoder { Variable X = Variable.create("X"); AtomicRole trackingSameAs = 
AtomicRole.create(Namespace.EQUALITY + "_tn"); OWLOntology onto = program.getOntology(); - Atom[] headAtom = new Atom[] {Atom.create(trackingSameAs, X, X)}, bodyAtom; - for (OWLClass cls: onto.getClassesInSignature(true)) { + Atom[] headAtom = new Atom[]{Atom.create(trackingSameAs, X, X)}, bodyAtom; + for(OWLOntology o : onto.getImportsClosure()) + for(OWLClass cls : o.getClassesInSignature()) { String clsIRI = cls.getIRI().toString(); unaryPredicates.add(clsIRI); bodyAtom = new Atom[] { @@ -43,9 +40,16 @@ public class TrackingRuleEncoderWithGap extends TrackingRuleEncoder { Atom.create(AtomicConcept.create(GapTupleIterator.getGapPredicate(clsIRI)), X)}; equalityRelatedClauses.add(DLClause.create(headAtom, bodyAtom)); } - - Variable Y = Variable.create("Y"); - for (OWLObjectProperty prop: onto.getObjectPropertiesInSignature(true)) { + + Variable Y = Variable.create("Y"); + Set setOfProperties = new HashSet(); + for(OWLOntology o : onto.getImportsClosure()) + for(OWLObjectProperty prop : o.getObjectPropertiesInSignature()) + setOfProperties.add(prop); + setOfProperties.add(onto.getOWLOntologyManager() + .getOWLDataFactory() + .getOWLObjectProperty(IRI.create(Namespace.INEQUALITY))); + for(OWLObjectProperty prop : setOfProperties) { String propIRI = prop.getIRI().toString(); binaryPredicates.add(propIRI); AtomicRole trackingRole = AtomicRole.create(propIRI + "_tn"); diff --git a/src/uk/ac/ox/cs/pagoda/util/Utility.java b/src/uk/ac/ox/cs/pagoda/util/Utility.java index 7b23e0d..e98cc81 100644 --- a/src/uk/ac/ox/cs/pagoda/util/Utility.java +++ b/src/uk/ac/ox/cs/pagoda/util/Utility.java @@ -12,25 +12,30 @@ import java.util.*; public class Utility { - private static Logger LOGS; - static { - LOGS = Logger.getLogger("PAGOdA"); - LOGS.setLevel(Level.INFO); - } - public static final String JAVA_FILE_SEPARATOR = "/"; public static final String FILE_SEPARATOR = System.getProperty("file.separator"); public static final String LINE_SEPARATOR = System.getProperty("line.separator"); - - private static final String TEMP_DIR_PATH= "pagoda_tmp"; - private static String tempDir; - - public static final int TEST = -1; + public static final int TEST = -1; public static final int FLY = 0; public static final int UOBM = 1; public static final int LUBM = 2; public static final int AEO = 3; public static final int WINE = 4; + private static final String TEMP_DIR_PATH = "pagoda_tmp"; + static Stack outs = new Stack(); + private static Logger LOGS; + private static String tempDir; + private static int asciiX = (int) 'X'; + private static StringBuilder logMessage = new StringBuilder(); + + static { + LOGS = Logger.getLogger("PAGOdA"); + LOGS.setLevel(Level.DEBUG); + } + + static { + outs.push(System.out); + } public static String getGlobalTempDirAbsolutePath() { if(tempDir == null) { @@ -38,7 +43,7 @@ public class Utility { Path path = Files.createTempDirectory(TEMP_DIR_PATH); tempDir = path.toString(); new File(tempDir).deleteOnExit(); - } catch (IOException e) { + } catch(IOException e) { e.printStackTrace(); System.exit(1); } @@ -46,46 +51,39 @@ public class Utility { return tempDir; } - public static Set toSet(Atom[] data) - { + public static Set toSet(Atom[] data) { HashSet ret = new HashSet(); - for (Atom element: data) + for(Atom element : data) ret.add(element); return ret; } - static Stack outs = new Stack(); - - static { - outs.push(System.out); - } - public static boolean redirectSystemOut() { String stamp = new SimpleDateFormat( "HH:mm:ss").format(new Date()); - return redirectCurrentOut("./console" + 
stamp + ".txt"); + return redirectCurrentOut("./console" + stamp + ".txt"); } public static boolean redirectCurrentOut(String fileName) { File file = new File(fileName); - PrintStream out; + PrintStream out; try { out = new PrintStream(new FileOutputStream(file)); } catch (FileNotFoundException e) { e.printStackTrace(); return false; } - outs.push(out); + outs.push(out); System.setOut(out); - return true; + return true; } - + public static void closeCurrentOut() { if (!outs.isEmpty()) outs.pop().close(); - - if (!outs.isEmpty()) + + if(!outs.isEmpty()) System.setOut(outs.peek()); } @@ -103,7 +101,7 @@ public class Utility { while ((line = reader.readLine()) != null && !line.startsWith("}")) if (first) { first = false; - query = expression(line.trim()); + query = expression(line.trim()); } else query += ", " + expression(line.trim()); writer.write(query); @@ -122,8 +120,6 @@ public class Utility { else return parts[1] + "(?" + variableIndex(parts[0]) + ",?" + variableIndex(parts[2]) + ")"; } - private static int asciiX = (int)'X'; - private static int variableIndex(String exp) { char var = exp.charAt(1); return (int)var - asciiX; @@ -135,12 +131,12 @@ public class Utility { return null; return line.trim(); } - + public static String getTextfromFile(String fileName) throws FileNotFoundException { Scanner scanner = new Scanner(new File(fileName)); String program = scanner.useDelimiter("\\Z").next(); scanner.close(); - return program; + return program; } public static String[] getPattern(BufferedReader answerReader) throws IOException { @@ -152,59 +148,57 @@ public class Utility { public static void removeRecursively(File file) { if (!file.exists()) return; - + if (file.isDirectory()) for (File tFile: file.listFiles()) removeRecursively(tFile); file.delete(); } - + public static void removeRecursively(String fileName) { removeRecursively(new File(fileName)); } public static Collection getQueryTexts(String fileName) throws IOException { - BufferedReader queryReader = new BufferedReader(new InputStreamReader(new FileInputStream(fileName))); - String line; - Collection queryTexts = new LinkedList(); + BufferedReader queryReader = new BufferedReader(new InputStreamReader(new FileInputStream(fileName))); + String line; + Collection queryTexts = new LinkedList(); while (true) { - while ((line = queryReader.readLine()) != null && ((line = line.trim()).isEmpty() || line.startsWith("#"))); + while((line = queryReader.readLine()) != null && ((line = line.trim()).isEmpty() || line.startsWith("#"))) ; if (line == null) { queryReader.close(); return queryTexts; } - + StringBuffer query = new StringBuffer(); if (!line.startsWith("^[")) query.append(line).append(LINE_SEPARATOR); - - while ((line = queryReader.readLine()) != null && !line.trim().endsWith("}")) + + while((line = queryReader.readLine()) != null && !line.trim().endsWith("}")) query.append(line).append(LINE_SEPARATOR); - query.append(line); + query.append(line); queryTexts.add(query.toString()); } } /** - * + * * @param answerReader * @return all lines before the next empty line * @throws IOException */ public static Collection getLines(BufferedReader answerReader) throws IOException { - Collection answerTuples = new LinkedList(); + Collection answerTuples = new LinkedList(); String line; while ((line = answerReader.readLine()) != null) { line = line.trim(); if (line.isEmpty()) - break; + break; answerTuples.add(line); } return answerTuples; } - private static StringBuilder logMessage = new StringBuilder(); - private static String 
getLogMessage(Object[] messages) { if (messages.length == 1) return messages[0].toString(); else { @@ -242,20 +236,6 @@ public class Utility { if (LOGS != null) LOGS.error(getLogMessage(messages)); } - -// public static void initialise() { -// File tmp = new File(TempDirectory); -// if (!tmp.exists()) tmp.mkdirs(); -// } -// -// public static void cleanup() { -// File tmp = new File(TempDirectory); -// if (tmp.exists()) { -// for (File file: tmp.listFiles()) -// file.delete(); -// tmp.delete(); -// } -// } public static String toFileIRI(String path) { String iri; -- cgit v1.2.3
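
The headline change merged here is the switch from GapByStore4ID to the new GapByStore4ID2 when computing the gap between the lower- and upper-bound materialisations: MyQueryReasoner now passes both the tracking (upper) store and the RL lower store, and BasicQueryEngine.getEqualityGroups gains a boolean reuse flag so the gap computation can recompute equality groups while QueryTracker keeps reusing the cached ones. The sketch below restates how that wiring looks after the patch; it is assembled from the hunks above, and the enclosing class, the method name, and the assumed package of DatalogProgram (uk.ac.ox.cs.pagoda.rules) are illustrative, not taken from the repository.

```java
import uk.ac.ox.cs.pagoda.query.GapByStore4ID;
import uk.ac.ox.cs.pagoda.query.GapByStore4ID2;
import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
import uk.ac.ox.cs.pagoda.rules.DatalogProgram;   // assumed location of DatalogProgram

import java.util.LinkedList;

// Illustrative sketch of the new gap wiring, reconstructed from the
// MyQueryReasoner and BasicQueryEngine hunks above; not repository code.
class GapMaterialisationSketch {

    LinkedList materialiseWithGap(BasicQueryEngine trackingStore,
                                  BasicQueryEngine rlLowerStore,
                                  DatalogProgram program) {
        // Before this patch: new GapByStore4ID(trackingStore).
        // GapByStore4ID2 additionally receives the lower-bound store so it can
        // compare equality (sameAs) groups between the two materialisations and
        // emit extra gap facts for individuals whose groups differ (see
        // GapByStore4ID2.getNextGapFactAboutEquality, which calls
        // getEqualityGroups(false) on both engines to recompute the groups).
        GapByStore4ID gap = new GapByStore4ID2(trackingStore, rlLowerStore);

        // Materialise the upper program on top of the lower bound and collect
        // the gap tuples, as in BasicQueryEngine.materialiseFoldedly.
        trackingStore.materialiseFoldedly(program, gap);

        // Predicates that received gap tuples; the tracking rule encoder uses
        // them later. The element type is not visible in this rendering of the
        // diff, so the raw LinkedList is kept here.
        LinkedList predicatesWithGap = gap.getPredicatesWithGap();
        gap.clear();
        return predicatesWithGap;
    }
}
```

QueryTracker, by contrast, now calls getEqualityGroups(true), so the cached equality groups are reused rather than recomputed for each tracked query.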
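
A smaller but recurring change in this patch: Clique's constructor now adds the representative to its own nodeTuples set, so the check plans (OpenEndPlan, OpenEndMultiThreadPlan, PlainPlan) count getNodeTuples().size() verified answers per clique instead of size() + 1. A minimal sketch of that invariant follows; it is not repository code, and the placeholder NodeTuple stands in for uk.ac.ox.cs.pagoda.summary.NodeTuple, whose type parameters are not visible in this rendering of the diff.

```java
import java.util.HashSet;
import java.util.Set;

// Sketch of the clique-counting invariant after this patch; not repository code.
class CliqueCountSketch {
    static class NodeTuple { }          // placeholder for the real NodeTuple class

    NodeTuple representative;
    Set<NodeTuple> nodeTuples = new HashSet<>();

    CliqueCountSketch(NodeTuple u) {
        representative = u;
        nodeTuples.add(u);              // new in this patch: the representative is a member of its own clique
    }

    // Callers that previously counted nodeTuples.size() + 1 answers per verified
    // clique now count size(), since the representative is no longer excluded.
    int verifiedAnswers() {
        return nodeTuples.size();
    }
}
```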