Diffstat (limited to 'src/main/scala/uk/ac/ox/cs/rsacomb/util')
4 files changed, 171 insertions, 126 deletions
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/DataFactory.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/DataFactory.scala
new file mode 100644
index 0000000..848c6b5
--- /dev/null
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/DataFactory.scala
@@ -0,0 +1,29 @@
1 | package uk.ac.ox.cs.rsacomb.util | ||
2 | |||
3 | import org.semanticweb.owlapi.apibinding.OWLManager | ||
4 | import org.semanticweb.owlapi.model.OWLClass | ||
5 | import tech.oxfordsemantic.jrdfox.logic.expression.Variable | ||
6 | |||
7 | /** Simple fresh variable/class generator */ | ||
8 | object DataFactory { | ||
9 | |||
10 | /** Manager instance to interface with OWLAPI */ | ||
11 | private val manager = OWLManager.createOWLOntologyManager() | ||
12 | private val factory = manager.getOWLDataFactory() | ||
13 | |||
14 | def apply(counter: Integer = -1): DataFactory = new DataFactory(counter) | ||
15 | } | ||
16 | |||
17 | class DataFactory(private var counter: Integer) { | ||
18 | |||
19 | private def getNext(): Integer = { | ||
20 | counter += 1 | ||
21 | counter | ||
22 | } | ||
23 | |||
24 | def getVariable(): Variable = | ||
25 | Variable.create(f"I${this.getNext()}%05d") | ||
26 | |||
27 | def getOWLClass(): OWLClass = | ||
28 | DataFactory.factory.getOWLClass(s"X${this.getNext()}") | ||
29 | } | ||
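
A usage sketch (illustrative, not part of the diff): the generator is stateful, so every call advances the shared counter and names minted from one instance never clash. A minimal example, assuming the default starting counter of -1:

    import uk.ac.ox.cs.rsacomb.util.DataFactory

    val fresh = DataFactory()      // counter starts at -1
    val v1 = fresh.getVariable()   // Variable "I00000"
    val v2 = fresh.getVariable()   // Variable "I00001"
    val c1 = fresh.getOWLClass()   // OWLClass named "X2"
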
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
index bcb1445..a55b5a0 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/Logger.scala
@@ -25,6 +25,10 @@ import java.io.PrintStream | |||
25 | */ | 25 | */ |
26 | object Logger { | 26 | object Logger { |
27 | 27 | ||
28 | private val time = Calendar.getInstance() | ||
29 | |||
30 | private lazy val dir = os.temp.dir(os.pwd, "rsacomb-", false) | ||
31 | |||
28 | /** Output stream for the logger. */ | 32 | /** Output stream for the logger. */ |
29 | var output: PrintStream = System.out | 33 | var output: PrintStream = System.out |
30 | 34 | ||
@@ -34,7 +38,7 @@ object Logger { | |||
34 | def compare(that: Level) = this.level - that.level | 38 | def compare(that: Level) = this.level - that.level |
35 | override def toString = name | 39 | override def toString = name |
36 | } | 40 | } |
37 | case object QUIET extends Level(0, "normal") | 41 | case object QUIET extends Level(0, "quiet") |
38 | case object NORMAL extends Level(1, "normal") | 42 | case object NORMAL extends Level(1, "normal") |
39 | case object DEBUG extends Level(2, "debug") | 43 | case object DEBUG extends Level(2, "debug") |
40 | case object VERBOSE extends Level(3, "verbose") | 44 | case object VERBOSE extends Level(3, "verbose") |
@@ -42,12 +46,13 @@ object Logger { | |||
42 | /** Current logger level */ | 46 | /** Current logger level */ |
43 | var level: Level = DEBUG | 47 | var level: Level = DEBUG |
44 | 48 | ||
45 | def print(str: Any, lvl: Level = NORMAL): Unit = { | 49 | def print(str: Any, lvl: Level = NORMAL): Unit = |
46 | if (lvl <= level) { | 50 | if (lvl <= level) |
47 | val time = Calendar.getInstance.getTime | 51 | output println s"[$lvl][${time.getTime}] $str" |
48 | output println s"[$lvl][$time] $str" | 52 | |
49 | } | 53 | def write(content: => os.Source, file: String, lvl: Level = VERBOSE): Unit = |
50 | } | 54 | if (lvl <= level) |
55 | os.write.append(dir / file, content) | ||
51 | 56 | ||
52 | def timed[A](expr: => A, desc: String = "", lvl: Level = NORMAL): A = { | 57 | def timed[A](expr: => A, desc: String = "", lvl: Level = NORMAL): A = { |
53 | val t0 = System.currentTimeMillis() | 58 | val t0 = System.currentTimeMillis() |
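
A sketch of how the revised Logger is meant to be driven (the rule text and file name are made up): `write` appends to a file inside a fresh `rsacomb-*` directory created under the working directory, and each call is a no-op unless its level is at or below the configured `Logger.level`:

    import uk.ac.ox.cs.rsacomb.util.Logger

    Logger.level = Logger.VERBOSE
    Logger.print("Computing canonical model", Logger.DEBUG)
    Logger.write("rsacomb:Named(?X) :- ...", "canonical_model.dlog", Logger.VERBOSE)
    val sum = Logger.timed((1 to 1000000).sum, "Toy computation", Logger.NORMAL)
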
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala
index a9027cf..e3e7dd4 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RDFoxUtil.scala
@@ -17,6 +17,7 @@ | |||
17 | package uk.ac.ox.cs.rsacomb.util | 17 | package uk.ac.ox.cs.rsacomb.util |
18 | 18 | ||
19 | import java.io.{OutputStream, File, StringReader} | 19 | import java.io.{OutputStream, File, StringReader} |
20 | import scala.collection.JavaConverters._ | ||
20 | import tech.oxfordsemantic.jrdfox.Prefixes | 21 | import tech.oxfordsemantic.jrdfox.Prefixes |
21 | import tech.oxfordsemantic.jrdfox.client.{ | 22 | import tech.oxfordsemantic.jrdfox.client.{ |
22 | ComponentInfo, | 23 | ComponentInfo, |
@@ -38,9 +39,11 @@ import tech.oxfordsemantic.jrdfox.logic.expression.{ | |||
38 | Literal, | 39 | Literal, |
39 | Resource, | 40 | Resource, |
40 | Variable, | 41 | Variable, |
41 | Term | 42 | Term, |
43 | IRI | ||
42 | } | 44 | } |
43 | import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery | 45 | import tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery |
46 | import uk.ac.ox.cs.rsacomb.sparql.ConjunctiveQuery | ||
44 | import uk.ac.ox.cs.rsacomb.suffix.Nth | 47 | import uk.ac.ox.cs.rsacomb.suffix.Nth |
45 | import uk.ac.ox.cs.rsacomb.util.Logger | 48 | import uk.ac.ox.cs.rsacomb.util.Logger |
46 | 49 | ||
@@ -84,12 +87,29 @@ object RDFoxUtil { | |||
84 | val password = "" | 87 | val password = "" |
85 | val server = | 88 | val server = |
86 | ConnectionFactory.newServerConnection(serverUrl, role, password) | 89 | ConnectionFactory.newServerConnection(serverUrl, role, password) |
90 | opts.put("type", "par-complex-nn") | ||
87 | if (!server.containsDataStore(datastore)) | 91 | if (!server.containsDataStore(datastore)) |
88 | server.createDataStore(datastore, "par-complex-nn", opts) | 92 | server.createDataStore(datastore, opts) |
89 | val data = server.newDataStoreConnection(datastore) | 93 | val data = server.newDataStoreConnection(datastore) |
90 | (server, data) | 94 | (server, data) |
91 | } | 95 | } |
92 | 96 | ||
97 | /** Get the IRI of a named graph (creating it if necessary) | ||
98 | * | ||
99 | * @param datastore name of the datastore to perform the action in. | ||
100 | * @param name name of the named graph. | ||
101 | * | ||
102 | * @return the full IRI for the (new) named graph. | ||
103 | */ | ||
104 | def getNamedGraph(datastore: String, name: String): IRI = { | ||
105 | val graph = RSA(name) | ||
106 | val (server, data) = openConnection(datastore) | ||
107 | if (!data.containsTupleTable(graph.getIRI)) | ||
108 | data.createTupleTable(graph.getIRI, Map("type" -> "named-graph").asJava) | ||
109 | RDFoxUtil.closeConnection(server, data) | ||
110 | return graph | ||
111 | } | ||
112 | |||
93 | /** Create a built-in `rdfox:SKOLEM` TupleTableAtom. */ | 113 | /** Create a built-in `rdfox:SKOLEM` TupleTableAtom. */ |
94 | def skolem(name: String, terms: Term*): TupleTableAtom = | 114 | def skolem(name: String, terms: Term*): TupleTableAtom = |
95 | TupleTableAtom.create( | 115 | TupleTableAtom.create( |
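
The new named-graph helper pairs with `openConnection` to give callers a stable IRI for a named graph. A sketch of the intended call pattern, assuming an RDFox endpoint is reachable with the connection settings used by `openConnection`; datastore and graph names are placeholders:

    import uk.ac.ox.cs.rsacomb.util.RDFoxUtil

    // Creates the named graph on first use, returns its IRI on every call.
    val graph = RDFoxUtil.getNamedGraph("rsacomb", "CanonicalModel")
    // graph is <http://www.cs.ox.ac.uk/isg/RSAComb#CanonicalModel>; it can be
    // handed to the graph-aware addFacts/addData further down.
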
@@ -122,13 +142,14 @@ object RDFoxUtil { | |||
122 | def addRules(data: DataStoreConnection, rules: Seq[Rule]): Unit = | 142 | def addRules(data: DataStoreConnection, rules: Seq[Rule]): Unit = |
123 | Logger.timed( | 143 | Logger.timed( |
124 | if (rules.length > 0) { | 144 | if (rules.length > 0) { |
125 | data.importData( | 145 | data addRules rules |
126 | UpdateType.ADDITION, | 146 | // data.importData( |
127 | RSA.Prefixes, | 147 | // UpdateType.ADDITION, |
128 | rules | 148 | // RSA.Prefixes, |
129 | .map(_.toString(Prefixes.s_emptyPrefixes)) | 149 | // rules |
130 | .mkString("\n") | 150 | // .map(_.toString(Prefixes.s_emptyPrefixes)) |
131 | ) | 151 | // .mkString("\n") |
152 | // ) | ||
132 | }, | 153 | }, |
133 | s"Loading ${rules.length} rules", | 154 | s"Loading ${rules.length} rules", |
134 | Logger.DEBUG | 155 | Logger.DEBUG |
@@ -139,10 +160,15 @@ object RDFoxUtil { | |||
139 | * @param data datastore connection | 160 | * @param data datastore connection |
140 | * @param facts collection of facts to be added to the data store | 161 | * @param facts collection of facts to be added to the data store |
141 | */ | 162 | */ |
142 | def addFacts(data: DataStoreConnection, facts: Seq[TupleTableAtom]): Unit = | 163 | def addFacts( |
164 | data: DataStoreConnection, | ||
165 | graph: IRI, | ||
166 | facts: Seq[TupleTableAtom] | ||
167 | ): Unit = | ||
143 | Logger.timed( | 168 | Logger.timed( |
144 | if (facts.length > 0) { | 169 | if (facts.length > 0) { |
145 | data.importData( | 170 | data.importData( |
171 | graph.getIRI, | ||
146 | UpdateType.ADDITION, | 172 | UpdateType.ADDITION, |
147 | RSA.Prefixes, | 173 | RSA.Prefixes, |
148 | facts | 174 | facts |
@@ -157,15 +183,17 @@ object RDFoxUtil { | |||
157 | /** Imports a sequence of files directly into a datastore. | 183 | /** Imports a sequence of files directly into a datastore. |
158 | * | 184 | * |
159 | * @param data datastore connection. | 185 | * @param data datastore connection. |
186 | * @param graph named graph where the data should be uploaded | ||
160 | * @param files sequence of files to upload. | 187 | * @param files sequence of files to upload. |
161 | */ | 188 | */ |
162 | def addData(data: DataStoreConnection, files: File*): Unit = | 189 | def addData(data: DataStoreConnection, graph: IRI, files: os.Path*): Unit = |
163 | Logger.timed( | 190 | Logger.timed( |
164 | files.foreach { | 191 | files.foreach { path => |
165 | data.importData( | 192 | data.importData( |
193 | graph.getIRI, | ||
166 | UpdateType.ADDITION, | 194 | UpdateType.ADDITION, |
167 | RSA.Prefixes, | 195 | RSA.Prefixes, |
168 | _ | 196 | path.toIO |
169 | ) | 197 | ) |
170 | }, | 198 | }, |
171 | "Loading data files", | 199 | "Loading data files", |
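
Both loaders now target an explicit named graph instead of the default graph. A combined usage sketch; the datastore name, graph name, file path and example IRI are all placeholders:

    import tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableName
    import tech.oxfordsemantic.jrdfox.logic.expression.IRI
    import uk.ac.ox.cs.rsacomb.util.{RDFoxUtil, RSA}

    val (server, data) = RDFoxUtil.openConnection("rsacomb")
    val graph = RDFoxUtil.getNamedGraph("rsacomb", "Data")
    val tt = TupleTableName.create(graph.getIRI)
    // Bulk data from a Turtle file plus one programmatically built fact,
    // both ending up in the same named graph.
    RDFoxUtil.addData(data, graph, os.pwd / "examples" / "data.ttl")
    RDFoxUtil.addFacts(data, graph, Seq(RSA.Named(tt)(IRI.create("http://example.org/alice"))))
    RDFoxUtil.closeConnection(server, data)
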
@@ -176,15 +204,6 @@ object RDFoxUtil { | |||
176 | def materialize(data: DataStoreConnection): Unit = | 204 | def materialize(data: DataStoreConnection): Unit = |
177 | Logger.timed(data.updateMaterialization(), "Materialization", Logger.DEBUG) | 205 | Logger.timed(data.updateMaterialization(), "Materialization", Logger.DEBUG) |
178 | 206 | ||
179 | /** Load SPARQL query from file. */ | ||
180 | def loadQueryFromFile(file: File): String = { | ||
181 | val source = io.Source.fromFile(file) | ||
182 | val query = source.getLines mkString "\n" | ||
183 | Logger print s"Loaded query:\n$query" | ||
184 | source.close() | ||
185 | query | ||
186 | } | ||
187 | |||
188 | /** Export data in `text/turtle`. | 207 | /** Export data in `text/turtle`. |
189 | * | 208 | * |
190 | * @param data datastore connection from which to export data. | 209 | * @param data datastore connection from which to export data. |
@@ -205,6 +224,50 @@ object RDFoxUtil { | |||
205 | ) | 224 | ) |
206 | } | 225 | } |
207 | 226 | ||
227 | /** Load SPARQL queries from file. | ||
228 | * | ||
229 | * The file can list multiple queries, each preceded by a | ||
230 | * single line containing "#^[Query<id>]" where "<id>" is a number. | ||
231 | * Empty lines are ignored. | ||
232 | * | ||
233 | * @note if a query is not recognized as a [[SelectQuery]] by RDFox | ||
234 | * it is discarded. | ||
235 | * | ||
236 | * @param file file containing a list of conjunctive queries. | ||
237 | * @param prefixes additional prefixes for the query. It defaults to | ||
238 | * an empty set. | ||
239 | * | ||
240 | * @return a list of [[tech.oxfordsemantic.jrdfox.logic.sparql.statement.SelectQuery SelectQuery]] queries. | ||
241 | */ | ||
242 | def loadQueriesFromFile( | ||
243 | path: os.Path, | ||
244 | prefixes: Prefixes = new Prefixes() | ||
245 | ): List[ConjunctiveQuery] = { | ||
246 | val header = raw"\^\[[Qq]uery(\d+)\]".r | ||
247 | val comment = "^#.*".r | ||
248 | val queries = os.read | ||
249 | .lines(path) | ||
250 | .map(_.trim.filter(_ >= ' ')) | ||
251 | .filterNot(_ == "") | ||
252 | .foldRight((List.empty[Option[ConjunctiveQuery]], List.empty[String])) { | ||
253 | case (line, (acc, query)) => { | ||
254 | line match { | ||
255 | case header(id) => { | ||
256 | val cq = | ||
257 | ConjunctiveQuery.parse(id.toInt, query.mkString(" "), prefixes) | ||
258 | (cq :: acc, List.empty) | ||
259 | } | ||
260 | case comment() => (acc, query) | ||
261 | case _ => (acc, line :: query) | ||
262 | } | ||
263 | } | ||
264 | } | ||
265 | ._1 | ||
266 | .collect { case Some(q) => q } | ||
267 | Logger print s"Loaded ${queries.length} queries from $path" | ||
268 | queries | ||
269 | } | ||
270 | |||
208 | /** Parse a SELECT query from a string in SPARQL format. | 271 | /** Parse a SELECT query from a string in SPARQL format. |
209 | * | 272 | * |
210 | * @param query the string containing the SPARQL query | 273 | * @param query the string containing the SPARQL query |
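
For reference, a query file accepted by the parser above could look like the one written below; header lines match the regular expression in the code, i.e. `^[QueryN]` on a line of its own, while `#`-prefixed lines are treated as comments. The temporary file is just for illustration:

    import uk.ac.ox.cs.rsacomb.util.RDFoxUtil

    val path = os.temp("""^[Query1]
                         |SELECT ?x WHERE { ?x a <http://example.org/Person> }
                         |
                         |^[Query2]
                         |SELECT ?x ?y WHERE { ?x <http://example.org/knows> ?y }
                         |""".stripMargin)
    // Expected: two ConjunctiveQuery objects; anything RDFox cannot parse as a
    // SELECT query is silently dropped.
    val queries = RDFoxUtil.loadQueriesFromFile(path)
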
@@ -282,12 +345,14 @@ object RDFoxUtil { | |||
282 | * compatible with the RDFox engine. This helper allows one to build a query | 345 | * compatible with the RDFox engine. This helper allows one to build a query |
283 | * to gather all instances of an internal predicate | 346 | * to gather all instances of an internal predicate |
284 | * | 347 | * |
348 | * @param graph named graph to query for the provided predicate | ||
285 | * @param pred name of the predicate to describe. | 349 | * @param pred name of the predicate to describe. |
286 | * @param arity arity of the predicate. | 350 | * @param arity arity of the predicate. |
287 | * @return a string containing a SPARQL query. | 351 | * @return a string containing a SPARQL query. |
288 | */ | 352 | */ |
289 | def buildDescriptionQuery( | 353 | def buildDescriptionQuery( |
290 | pred: String, | 354 | graph: IRI, |
355 | pred: IRI, | ||
291 | arity: Int | 356 | arity: Int |
292 | ): String = { | 357 | ): String = { |
293 | if (arity > 0) { | 358 | if (arity > 0) { |
@@ -295,55 +360,12 @@ object RDFoxUtil { | |||
295 | s""" | 360 | s""" |
296 | SELECT $variables | 361 | SELECT $variables |
297 | WHERE { | 362 | WHERE { |
298 | ?K a rsa:$pred. | 363 | GRAPH $graph { ?K a $pred }. |
299 | TT <http://oxfordsemantic.tech/RDFox#SKOLEM> { $variables ?K } . | 364 | TT ${TupleTableName.SKOLEM} { $variables ?K } . |
300 | } | 365 | } |
301 | """ | 366 | """ |
302 | } else { | 367 | } else { |
303 | "ASK { ?X a rsa:Ans }" | 368 | s"ASK { GRAPH $graph { ?X a $pred } }" |
304 | } | ||
305 | } | ||
306 | |||
307 | /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.Rule Rule]]. | ||
308 | * | ||
309 | * This is needed because RDFox supports only predicates of arity 1 | ||
310 | * or 2, but the filtering program uses predicates with higher arity. | ||
311 | * | ||
312 | * @note we can perform a reification of the atoms thanks to the | ||
313 | * built-in `SKOLEM` funtion of RDFox. | ||
314 | */ | ||
315 | def reify(rule: Rule): Rule = { | ||
316 | val (sk, as) = rule.getHead.map(_.reified).unzip | ||
317 | val head: List[TupleTableAtom] = as.flatten | ||
318 | val skolem: List[BodyFormula] = sk.flatten | ||
319 | val body: List[BodyFormula] = rule.getBody.map(reify).flatten | ||
320 | Rule.create(head, skolem ::: body) | ||
321 | } | ||
322 | |||
323 | /** Reify a [[tech.oxfordsemantic.jrdfox.logic.datalog.BodyFormula BodyFormula]]. | ||
324 | * | ||
325 | * This is needed because RDFox supports only predicates of arity 1 | ||
326 | * or 2, but the filtering program uses predicates with higher arity. | ||
327 | * | ||
328 | * @note we can perform a reification of the atoms thanks to the | ||
329 | * built-in `SKOLEM` funtion of RDFox. | ||
330 | */ | ||
331 | private def reify(formula: BodyFormula): List[BodyFormula] = { | ||
332 | formula match { | ||
333 | case atom: TupleTableAtom => atom.reified._2 | ||
334 | case neg: Negation => { | ||
335 | val (sk, as) = neg.getNegatedAtoms | ||
336 | .map({ | ||
337 | case a: TupleTableAtom => a.reified | ||
338 | case a => (None, List(a)) | ||
339 | }) | ||
340 | .unzip | ||
341 | val skolem = | ||
342 | sk.flatten.map(_.getArguments.last).collect { case v: Variable => v } | ||
343 | val atoms = as.flatten | ||
344 | List(Negation.create(skolem, atoms)) | ||
345 | } | ||
346 | case other => List(other) | ||
347 | } | 369 | } |
348 | } | 370 | } |
349 | 371 | ||
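
A sketch of calling the reworked description-query builder; the graph IRI and arity are invented for the example, while rsacomb:QM and rsacomb:Ans come from the vocabulary in RSA.scala below:

    import uk.ac.ox.cs.rsacomb.util.{RDFoxUtil, RSA}

    val graph = RSA("CanonicalModel")
    // SELECT query that unpacks, via the SKOLEM tuple table, the reified
    // arguments of an internal predicate (arity chosen arbitrarily here).
    val describeQM = RDFoxUtil.buildDescriptionQuery(graph, RSA.QM, 3)
    // With arity 0 the helper produces an ASK query instead.
    val askAns = RDFoxUtil.buildDescriptionQuery(graph, RSA.ANS, 0)
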
diff --git a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala
index 8b341ba..5abb83c 100644
--- a/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala
+++ b/src/main/scala/uk/ac/ox/cs/rsacomb/util/RSA.scala
@@ -42,15 +42,51 @@ import scala.collection.JavaConverters._ | |||
42 | 42 | ||
43 | object RSA { | 43 | object RSA { |
44 | 44 | ||
45 | /** Simplify conversion between Java and Scala `List`s */ | ||
46 | import uk.ac.ox.cs.rsacomb.implicits.JavaCollections._ | ||
47 | |||
48 | /** Set of default prefixes to be included in all datastore operations */ | ||
45 | val Prefixes: Prefixes = new Prefixes() | 49 | val Prefixes: Prefixes = new Prefixes() |
46 | Prefixes.declarePrefix("rsa:", "http://www.cs.ox.ac.uk/isg/rsa/") | 50 | Prefixes.declarePrefix("rsacomb:", "http://www.cs.ox.ac.uk/isg/RSAComb#") |
51 | Prefixes.declarePrefix("rdfox:", "http://oxfordsemantic.tech/RDFox#") | ||
47 | Prefixes.declarePrefix("owl:", "http://www.w3.org/2002/07/owl#") | 52 | Prefixes.declarePrefix("owl:", "http://www.w3.org/2002/07/owl#") |
48 | 53 | ||
54 | /** Creates a `rsacomb:<name>` IRI */ | ||
55 | def apply(name: Any): IRI = | ||
56 | IRI.create( | ||
57 | //Prefixes.decodeIRI("rsacomb:") + name.toString | ||
58 | Prefixes.getPrefixIRIsByPrefixName.get("rsacomb:").getIRI + name.toString | ||
59 | ) | ||
60 | |||
61 | /** Helper IRIs */ | ||
62 | val ANS = RSA("Ans") | ||
63 | val AQ = RSA("AQ") | ||
49 | val CONGRUENT = RSA("congruent") | 64 | val CONGRUENT = RSA("congruent") |
65 | val FK = RSA("FK") | ||
66 | val ID = RSA("ID") | ||
67 | val IN = RSA("In") | ||
50 | val NAMED = RSA("Named") | 68 | val NAMED = RSA("Named") |
69 | val NI = RSA("NI") | ||
70 | val QM = RSA("QM") | ||
71 | val SP = RSA("SP") | ||
72 | val TQ = RSA("TQ") | ||
73 | |||
74 | def Named(tt: TupleTableName)(x: Term): TupleTableAtom = | ||
75 | TupleTableAtom.create(tt, x, IRI.RDF_TYPE, RSA.NAMED) | ||
76 | def Congruent(tt: TupleTableName)(x: Term, y: Term): TupleTableAtom = | ||
77 | TupleTableAtom.create(tt, x, RSA.CONGRUENT, y) | ||
78 | def Skolem(skolem: Term, terms: List[Term]): TupleTableAtom = | ||
79 | TupleTableAtom.create(TupleTableName.SKOLEM, terms :+ skolem) | ||
80 | |||
81 | // def In(t: Term)(implicit set: Term) = | ||
82 | // TupleTableAtom.rdf(t, RSA("In"), set) | ||
51 | 83 | ||
52 | private def atom(name: IRI, vars: List[Term]): TupleTableAtom = | 84 | // def NotIn(t: Term)(implicit set: Term) = Negation.create(In(t)(set)) |
53 | TupleTableAtom.create(TupleTableName.create(name.getIRI), vars: _*) | 85 | |
86 | /* TODO: review after reworking the dependency graph construction */ | ||
87 | |||
88 | // private def atom(name: IRI, vars: List[Term]): TupleTableAtom = | ||
89 | // TupleTableAtom.create(TupleTableName.create(name.getIRI), vars: _*) | ||
54 | 90 | ||
55 | def E(t1: Term, t2: Term) = | 91 | def E(t1: Term, t2: Term) = |
56 | TupleTableAtom.rdf(t1, RSA("E"), t2) | 92 | TupleTableAtom.rdf(t1, RSA("E"), t2) |
@@ -61,51 +97,4 @@ object RSA { | |||
61 | def U(t: Term) = | 97 | def U(t: Term) = |
62 | TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("U")) | 98 | TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("U")) |
63 | 99 | ||
64 | def In(t: Term)(implicit set: Term) = | ||
65 | TupleTableAtom.rdf(t, RSA("In"), set) | ||
66 | |||
67 | def NotIn(t: Term)(implicit set: Term) = Negation.create(In(t)(set)) | ||
68 | |||
69 | def Congruent(t1: Term, t2: Term) = | ||
70 | TupleTableAtom.rdf(t1, RSA("congruent"), t2) | ||
71 | |||
72 | def QM(implicit q: ConjunctiveQuery) = | ||
73 | atom(RSA("QM"), q.answer ::: q.bounded) | ||
74 | |||
75 | def ID(t1: Term, t2: Term)(implicit q: ConjunctiveQuery) = { | ||
76 | atom(RSA("ID"), (q.answer ::: q.bounded) :+ t1 :+ t2) | ||
77 | } | ||
78 | |||
79 | def Named(t: Term) = | ||
80 | TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("Named")) | ||
81 | |||
82 | def Thing(t: Term) = | ||
83 | TupleTableAtom.rdf(t, IRI.RDF_TYPE, IRI.THING) | ||
84 | |||
85 | def NI(t: Term) = | ||
86 | TupleTableAtom.rdf(t, IRI.RDF_TYPE, RSA("NI")) | ||
87 | |||
88 | def TQ(t1: Term, t2: Term, sx: RSASuffix)(implicit q: ConjunctiveQuery) = | ||
89 | atom(RSA("TQ" :: sx), (q.answer ::: q.bounded) :+ t1 :+ t2) | ||
90 | |||
91 | def AQ(t1: Term, t2: Term, sx: RSASuffix)(implicit q: ConjunctiveQuery) = | ||
92 | atom(RSA("AQ" :: sx), (q.answer ::: q.bounded) :+ t1 :+ t2) | ||
93 | |||
94 | def FK(implicit q: ConjunctiveQuery) = | ||
95 | atom(RSA("FK"), q.answer ::: q.bounded) | ||
96 | |||
97 | def SP(implicit q: ConjunctiveQuery) = | ||
98 | atom(RSA("SP"), q.answer ::: q.bounded) | ||
99 | |||
100 | def Ans(implicit q: ConjunctiveQuery) = { | ||
101 | if (q.bcq) | ||
102 | TupleTableAtom.rdf(RSA("blank"), IRI.RDF_TYPE, RSA("Ans")) | ||
103 | else | ||
104 | atom(RSA("Ans"), q.answer) | ||
105 | } | ||
106 | |||
107 | def apply(name: Any): IRI = | ||
108 | IRI.create( | ||
109 | Prefixes.getPrefixIRIsByPrefixName.get("rsa:").getIRI + name.toString | ||
110 | ) | ||
111 | } | 100 | } |
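
Finally, a sketch of the reworked vocabulary helpers: `apply` mints IRIs in the new rsacomb: namespace, and the Named/Congruent builders are now parameterised by the tuple table (named graph) the atom should target. The graph name and variables below are illustrative:

    import tech.oxfordsemantic.jrdfox.logic.datalog.TupleTableName
    import tech.oxfordsemantic.jrdfox.logic.expression.Variable
    import uk.ac.ox.cs.rsacomb.util.RSA

    val ans = RSA("Ans")   // <http://www.cs.ox.ac.uk/isg/RSAComb#Ans>, same value as RSA.ANS
    val tt = TupleTableName.create(RSA("CanonicalModel").getIRI)
    val x = Variable.create("X")
    val y = Variable.create("Y")
    val named     = RSA.Named(tt)(x)          // ?X rdf:type rsacomb:Named in that graph
    val congruent = RSA.Congruent(tt)(x, y)   // ?X rsacomb:congruent ?Y in that graph
    val skolemise = RSA.Skolem(Variable.create("K"), List(x, y))   // rdfox:SKOLEM built-in atom
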