aboutsummaryrefslogtreecommitdiff
path: root/src/uk/ac/ox/cs/pagoda/reasoner/HermiTReasoner.java
blob: d1856c9aa571bd9cc1b6e23115b5e9c51bca3fd5 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
package uk.ac.ox.cs.pagoda.reasoner;

import org.semanticweb.HermiT.Reasoner;
import org.semanticweb.owlapi.model.*;
import uk.ac.ox.cs.JRDFox.model.Individual;
import uk.ac.ox.cs.pagoda.multistage.MultiStageQueryEngine;
import uk.ac.ox.cs.pagoda.owl.OWLHelper;
import uk.ac.ox.cs.pagoda.owl.QueryRoller;
import uk.ac.ox.cs.pagoda.query.*;
import uk.ac.ox.cs.pagoda.reasoner.light.BasicQueryEngine;
import uk.ac.ox.cs.pagoda.rules.DatalogProgram;
import uk.ac.ox.cs.pagoda.util.Utility;

import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

class HermiTReasoner extends QueryReasoner {

	// Fully-fledged OWL 2 DL reasoner used to compute sound (lower-bound) answers.
	Reasoner hermit;

	// RL upper-bound store, materialised from the datalog over-approximation in preprocess().
	BasicQueryEngine upperStore = null;

	// Ontology under reasoning; after preprocess() this is the TBox merged with the imported data.
	OWLOntology onto;
	OWLDataFactory factory;

	// Path of the temporary merged ontology written by preprocess(); removed again in dispose().
	String importedOntologyPath = null;

	// Rolls tree-shaped conjunctive queries up into class expressions for entailment checking.
	QueryRoller roller;
	// When false, isConsistent() is assumed true without calling HermiT.
	boolean toCheckSatisfiability;

	public HermiTReasoner(boolean toCheckSatisfiability) {
		this.toCheckSatisfiability = toCheckSatisfiability;
	}

	@Override
	public void loadOntology(OWLOntology ontology) {
		onto = ontology;
	}

	/**
	 * Merges the loaded TBox with the imported data files, materialises the RL
	 * upper-bound store, and instantiates HermiT over the merged ontology.
	 *
	 * @return false if the data import fails or (when satisfiability checking is
	 *         enabled) the merged ontology is inconsistent; true otherwise.
	 */
	@Override
	public boolean preprocess() {
		OWLOntology tbox = onto;
		try {
			onto = OWLHelper.getImportedOntology(tbox, importedData.toString().split(ImportDataFileSeparator));
			importedOntologyPath = OWLHelper.getOntologyPath(onto);
		} catch(OWLOntologyCreationException | OWLOntologyStorageException | IOException e) {
			// FIX: previously the exception was only printed and preprocessing carried
			// on with `onto` still pointing at the bare TBox, so HermiT would silently
			// reason without the imported data. Report failure to the caller instead.
			e.printStackTrace();
			return false;
		}

		DatalogProgram datalogProgram = new DatalogProgram(tbox, false);
		importData(datalogProgram.getAdditionalDataFile());
		upperStore = new MultiStageQueryEngine("rl-upper", false);
		upperStore.importRDFData("data", importedData.toString());
		GapByStore4ID gap = new GapByStore4ID(upperStore);
		upperStore.materialiseFoldedly(datalogProgram, gap);
		gap.clear();

		factory = onto.getOWLOntologyManager().getOWLDataFactory();
		roller = new QueryRoller(factory);

		hermit = new Reasoner(onto);
		return isConsistent();
	}

	/**
	 * @return HermiT's consistency verdict, or true unconditionally when the
	 *         check was disabled at construction time.
	 */
	@Override
	public boolean isConsistent() {
		if (toCheckSatisfiability)
			return hermit.isConsistent();
		return true;
	}

	/**
	 * Computes sound (lower-bound) answers for a single-distinguished-variable
	 * query by rolling it up into a class expression and asking HermiT for an
	 * instance check against every named individual in the ontology's signature.
	 * Queries with more than one distinguished variable are currently left
	 * unanswered (marked processed without lower-bound answers).
	 */
	@Override
	public void evaluate(QueryRecord record) {
		String[] disVars = record.getDistinguishedVariables();
		// true -> also collect individuals from the imports closure
		Set<OWLNamedIndividual> individuals = onto.getIndividualsInSignature(true);
		if (disVars.length == 1) {
			// NOTE(review): the guard checks the distinguished variables but the
			// roll-up uses getAnswerVariables()[0] — confirm the two coincide for
			// single-variable queries.
			OWLClassExpression clsExp = roller.rollUp(record.getClause(), record.getAnswerVariables()[0]);
			Set<AnswerTuple> answers = new HashSet<>();
			for (OWLNamedIndividual individual: individuals) {
				Utility.logDebug("checking ... " + individual);
				if (hermit.isEntailed(factory.getOWLClassAssertionAxiom(clsExp, individual))) {
					answers.add(new AnswerTuple(new Individual[] {Individual.create(individual.toStringID())}));
				}
			}
			record.updateLowerBoundAnswers(new AnswerTuplesImp(record.getAnswerVariables(), answers));
			record.markAsProcessed();
		}
		else {
			// FIXME join here
			record.markAsProcessed();
		}
	}

	/**
	 * Evaluates the query against the RL upper-bound store and records the
	 * result as (possibly unsound) upper-bound answers; the answer iterator is
	 * always disposed, even if evaluation throws.
	 */
	@Override
	public void evaluateUpper(QueryRecord record) {
		AnswerTuples rlAnswer = null;
		try {
			rlAnswer = upperStore.evaluate(record.getQueryText(), record.getAnswerVariables());
			record.updateUpperBoundAnswers(rlAnswer, true);
		} finally {
			if (rlAnswer != null) rlAnswer.dispose();
		}
	}

	/**
	 * Deletes the temporary merged ontology created in preprocess() (best
	 * effort) and delegates to the superclass for the remaining cleanup.
	 */
	@Override
	public void dispose() {
		if (importedOntologyPath != null) {
			File tmp = new File(importedOntologyPath);
			// FIX: delete() returns false on failure; log it instead of ignoring it.
			if (tmp.exists() && !tmp.delete())
				Utility.logDebug("Failed to delete temporary ontology file: " + importedOntologyPath);
		}
		super.dispose();
	}

}